From a284d376a6c0f86a0db61810dff7635d41ced960 Mon Sep 17 00:00:00 2001 From: aws-sdk-go-automation <43143561+aws-sdk-go-automation@users.noreply.github.com> Date: Fri, 24 Mar 2023 11:24:15 -0700 Subject: [PATCH] Release v1.44.229 (2023-03-24) (#4773) Release v1.44.229 (2023-03-24) === ### Service Client Updates * `service/comprehend`: Updates service API and documentation * `service/monitoring`: Updates service documentation * Doc-only update to correct alarm actions list * `service/rds`: Updates service API, documentation, waiters, paginators, and examples * Added error code CreateCustomDBEngineVersionFault for when the create custom engine version for Custom engines fails. --- CHANGELOG.md | 10 + aws/endpoints/defaults.go | 25 + aws/version.go | 2 +- models/apis/comprehend/2017-11-27/api-2.json | 6 +- models/apis/comprehend/2017-11-27/docs-2.json | 205 +-- .../2017-11-27/endpoint-tests-1.json | 180 +- models/apis/monitoring/2010-08-01/docs-2.json | 24 +- .../2010-08-01/endpoint-rule-set-1.json | 412 +++-- .../2010-08-01/endpoint-tests-1.json | 1573 +++-------------- models/apis/rds/2014-10-31/api-2.json | 14 +- models/apis/rds/2014-10-31/docs-2.json | 39 +- .../apis/rds/2014-10-31/endpoint-tests-1.json | 146 +- models/endpoints/endpoints.json | 11 + service/cloudwatch/api.go | 145 +- service/comprehend/api.go | 518 +++--- service/comprehend/doc.go | 8 +- service/rds/api.go | 144 +- service/rds/errors.go | 6 + 18 files changed, 1300 insertions(+), 2168 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6e350d64b9e..25b95ba09f9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,13 @@ +Release v1.44.229 (2023-03-24) +=== + +### Service Client Updates +* `service/comprehend`: Updates service API and documentation +* `service/monitoring`: Updates service documentation + * Doc-only update to correct alarm actions list +* `service/rds`: Updates service API, documentation, waiters, paginators, and examples + * Added error code CreateCustomDBEngineVersionFault for when the create custom engine version for Custom engines fails. 
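(Editor's note, not part of the patch: the new RDS fault, returned when creating a custom engine version fails, surfaces through the SDK's usual awserr type assertion. The sketch below assumes the error constant follows the SDK's standard ErrCode naming for the fault added in service/rds/errors.go; the engine name and version are placeholders, and the input is abbreviated rather than a complete CreateCustomDBEngineVersion request.)

package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/awserr"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/rds"
)

func main() {
	// Illustrative only: call CreateCustomDBEngineVersion and branch on the
	// error code added in this release.
	sess := session.Must(session.NewSession(aws.NewConfig().WithRegion("us-west-2")))
	svc := rds.New(sess)

	_, err := svc.CreateCustomDBEngineVersion(&rds.CreateCustomDBEngineVersionInput{
		Engine:        aws.String("custom-oracle-ee"), // placeholder engine
		EngineVersion: aws.String("19.my_cev1"),       // placeholder CEV name
	})
	if aerr, ok := err.(awserr.Error); ok {
		if aerr.Code() == rds.ErrCodeCreateCustomDBEngineVersionFault {
			fmt.Println("custom engine version creation failed:", aerr.Message())
			return
		}
	}
	if err != nil {
		fmt.Println("request failed:", err)
	}
}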
+ Release v1.44.228 (2023-03-23) === diff --git a/aws/endpoints/defaults.go b/aws/endpoints/defaults.go index cbd98b571bd..d3789cce827 100644 --- a/aws/endpoints/defaults.go +++ b/aws/endpoints/defaults.go @@ -14341,6 +14341,31 @@ var awsPartition = partition{ }: endpoint{}, }, }, + "ivsrealtime": service{ + Endpoints: serviceEndpoints{ + endpointKey{ + Region: "ap-northeast-1", + }: endpoint{}, + endpointKey{ + Region: "ap-northeast-2", + }: endpoint{}, + endpointKey{ + Region: "ap-south-1", + }: endpoint{}, + endpointKey{ + Region: "eu-central-1", + }: endpoint{}, + endpointKey{ + Region: "eu-west-1", + }: endpoint{}, + endpointKey{ + Region: "us-east-1", + }: endpoint{}, + endpointKey{ + Region: "us-west-2", + }: endpoint{}, + }, + }, "kafka": service{ Endpoints: serviceEndpoints{ endpointKey{ diff --git a/aws/version.go b/aws/version.go index f61d223cc0a..55e65f65519 100644 --- a/aws/version.go +++ b/aws/version.go @@ -5,4 +5,4 @@ package aws const SDKName = "aws-sdk-go" // SDKVersion is the version of this SDK -const SDKVersion = "1.44.228" +const SDKVersion = "1.44.229" diff --git a/models/apis/comprehend/2017-11-27/api-2.json b/models/apis/comprehend/2017-11-27/api-2.json index fede2943c8c..6857e61da83 100644 --- a/models/apis/comprehend/2017-11-27/api-2.json +++ b/models/apis/comprehend/2017-11-27/api-2.json @@ -2702,7 +2702,8 @@ "LanguageCode":{"shape":"LanguageCode"}, "DataAccessRoleArn":{"shape":"IamRoleArn"}, "VolumeKmsKeyId":{"shape":"KmsKeyId"}, - "VpcConfig":{"shape":"VpcConfig"} + "VpcConfig":{"shape":"VpcConfig"}, + "FlywheelArn":{"shape":"ComprehendFlywheelArn"} } }, "EntitiesDetectionJobPropertiesList":{ @@ -3690,7 +3691,8 @@ "STOP_REQUESTED", "STOPPED", "IN_ERROR", - "TRAINED" + "TRAINED", + "TRAINED_WITH_WARNING" ] }, "ModelType":{ diff --git a/models/apis/comprehend/2017-11-27/docs-2.json b/models/apis/comprehend/2017-11-27/docs-2.json index 3cdb9d2b93e..2ddba539845 100644 --- a/models/apis/comprehend/2017-11-27/docs-2.json +++ b/models/apis/comprehend/2017-11-27/docs-2.json @@ -1,6 +1,6 @@ { "version": "2.0", - "service": "

Amazon Comprehend is an AWS service for gaining insight into the content of documents. Use these actions to determine the topics contained in your documents, the topics they discuss, the predominant sentiment expressed in them, the predominant language used, and more.

", + "service": "

Amazon Comprehend is an Amazon Web Services service for gaining insight into the content of documents. Use these actions to determine the topics your documents discuss, the predominant sentiment expressed in them, the predominant language used, and more.

", "operations": { "BatchDetectDominantLanguage": "

Determines the dominant language of the input text for a batch of documents. For a list of languages that Amazon Comprehend can detect, see Amazon Comprehend Supported Languages.

", "BatchDetectEntities": "

Inspects the text of a batch of documents for named entities and returns information about them. For more information about named entities, see Entities in the Comprehend Developer Guide.

", @@ -14,7 +14,7 @@ "CreateDocumentClassifier": "

Creates a new document classifier that you can use to categorize documents. To create a classifier, you provide a set of training documents that are labeled with the categories that you want to use. After the classifier is trained, you can use it to categorize a set of labeled documents into the categories. For more information, see Document Classification in the Comprehend Developer Guide.

", "CreateEndpoint": "

Creates a model-specific endpoint for synchronous inference for a previously trained custom model. For information about endpoints, see Managing endpoints.

", "CreateEntityRecognizer": "

Creates an entity recognizer using submitted files. After your CreateEntityRecognizer request is submitted, you can check job status using the DescribeEntityRecognizer API.

", - "CreateFlywheel": "

A flywheel is an AWS resource that orchestrates the ongoing training of a model for custom classification or custom entity recognition. You can create a flywheel to start with an existing trained model, or Comprehend can create and train a new model.

When you create the flywheel, Comprehend creates a data lake in your account. The data lake holds the training data and test data for all versions of the model.

To use a flywheel with an existing trained model, you specify the active model version. Comprehend copies the model's training data and test data into the flywheel's data lake.

To use the flywheel with a new model, you need to provide a dataset for training data (and optional test data) when you create the flywheel.

For more information about flywheels, see Flywheel overview in the Amazon Comprehend Developer Guide.

", + "CreateFlywheel": "

A flywheel is an Amazon Web Services resource that orchestrates the ongoing training of a model for custom classification or custom entity recognition. You can create a flywheel to start with an existing trained model, or Comprehend can create and train a new model.

When you create the flywheel, Comprehend creates a data lake in your account. The data lake holds the training data and test data for all versions of the model.

To use a flywheel with an existing trained model, you specify the active model version. Comprehend copies the model's training data and test data into the flywheel's data lake.

To use the flywheel with a new model, you need to provide a dataset for training data (and optional test data) when you create the flywheel.

For more information about flywheels, see Flywheel overview in the Amazon Comprehend Developer Guide.
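(Editor's sketch, not part of this model file: with this SDK, creating a flywheel around an existing trained model might look like the following; the classifier ARN, role ARN, and data-lake S3 URI are placeholder values.)

package main

import (
	"fmt"
	"log"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/comprehend"
)

func main() {
	svc := comprehend.New(session.Must(session.NewSession()))

	// Illustrative only: all ARNs and the S3 location below are placeholders.
	out, err := svc.CreateFlywheel(&comprehend.CreateFlywheelInput{
		FlywheelName:      aws.String("my-flywheel"),
		ActiveModelArn:    aws.String("arn:aws:comprehend:us-west-2:111122223333:document-classifier/my-classifier/version/v1"),
		DataAccessRoleArn: aws.String("arn:aws:iam::111122223333:role/comprehend-flywheel-role"),
		DataLakeS3Uri:     aws.String("s3://my-bucket/flywheel-data-lake/"),
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("flywheel ARN:", aws.StringValue(out.FlywheelArn))
}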

", "DeleteDocumentClassifier": "

Deletes a previously created document classifier.

Only those classifiers that are in terminated states (IN_ERROR, TRAINED) will be deleted. If an active inference job is using the model, a ResourceInUseException will be returned.

This is an asynchronous action that puts the classifier into a DELETING state, and it is then removed by a background job. Once removed, the classifier disappears from your account and is no longer available for use.

", "DeleteEndpoint": "

Deletes a model-specific endpoint for a previously-trained custom model. All endpoints must be deleted in order for the model to be deleted. For information about endpoints, see Managing endpoints.

", "DeleteEntityRecognizer": "

Deletes an entity recognizer.

Only those recognizers that are in terminated states (IN_ERROR, TRAINED) will be deleted. If an active inference job is using the model, a ResourceInUseException will be returned.

This is an asynchronous action that puts the recognizer into a DELETING state, and it is then removed by a background job. Once removed, the recognizer disappears from your account and is no longer available for use.

", @@ -43,8 +43,8 @@ "DetectSentiment": "

Inspects text and returns an inference of the prevailing sentiment (POSITIVE, NEUTRAL, MIXED, or NEGATIVE).
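(Editor's sketch, not part of this file: a minimal synchronous DetectSentiment call with this SDK.)

package main

import (
	"fmt"
	"log"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/comprehend"
)

func main() {
	svc := comprehend.New(session.Must(session.NewSession()))

	out, err := svc.DetectSentiment(&comprehend.DetectSentimentInput{
		Text:         aws.String("I love this product."),
		LanguageCode: aws.String("en"),
	})
	if err != nil {
		log.Fatal(err)
	}
	// Sentiment is one of POSITIVE, NEGATIVE, NEUTRAL, or MIXED.
	fmt.Println(aws.StringValue(out.Sentiment))
}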

", "DetectSyntax": "

Inspects text for syntax and the part of speech of words in the document. For more information, see Syntax in the Comprehend Developer Guide.

", "DetectTargetedSentiment": "

Inspects the input text and returns a sentiment analysis for each entity identified in the text.

For more information about targeted sentiment, see Targeted sentiment.

", - "ImportModel": "

Creates a new custom model that replicates a source custom model that you import. The source model can be in your AWS account or another one.

If the source model is in another AWS account, then it must have a resource-based policy that authorizes you to import it.

The source model must be in the same AWS region that you're using when you import. You can't import a model that's in a different region.

", - "ListDatasets": "

List the datasets that you have configured in this region. For more information about datasets, see Flywheel overview in the Amazon Comprehend Developer Guide.

", + "ImportModel": "

Creates a new custom model that replicates a source custom model that you import. The source model can be in your Amazon Web Services account or another one.

If the source model is in another Amazon Web Services account, then it must have a resource-based policy that authorizes you to import it.

The source model must be in the same Amazon Web Services Region that you're using when you import. You can't import a model that's in a different Region.
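(Editor's sketch, not part of this file: importing a custom model shared from another account. The source model ARN is a placeholder, and the source account must first attach a resource-based policy via PutResourcePolicy.)

package main

import (
	"fmt"
	"log"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/comprehend"
)

func main() {
	svc := comprehend.New(session.Must(session.NewSession()))

	// Illustrative only: the source ARN below is a placeholder for a model
	// in another account that authorizes this account to import it.
	out, err := svc.ImportModel(&comprehend.ImportModelInput{
		SourceModelArn: aws.String("arn:aws:comprehend:us-west-2:444455556666:document-classifier/shared-classifier/version/v1"),
		ModelName:      aws.String("imported-classifier"),
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("imported model ARN:", aws.StringValue(out.ModelArn))
}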

", + "ListDatasets": "

Lists the datasets that you have configured in this Region. For more information about datasets, see Flywheel overview in the Amazon Comprehend Developer Guide.

", "ListDocumentClassificationJobs": "

Gets a list of the document classification jobs that you have submitted.

", "ListDocumentClassifierSummaries": "

Gets a list of summaries of the document classifiers that you have created.

", "ListDocumentClassifiers": "

Gets a list of the document classifiers that you have created.

", @@ -62,7 +62,7 @@ "ListTagsForResource": "

Lists all tags associated with a given Amazon Comprehend resource.

", "ListTargetedSentimentDetectionJobs": "

Gets a list of targeted sentiment detection jobs that you have submitted.

", "ListTopicsDetectionJobs": "

Gets a list of the topic detection jobs that you have submitted.

", - "PutResourcePolicy": "

Attaches a resource-based policy to a custom model. You can use this policy to authorize an entity in another AWS account to import the custom model, which replicates it in Amazon Comprehend in their account.

", + "PutResourcePolicy": "

Attaches a resource-based policy to a custom model. You can use this policy to authorize an entity in another Amazon Web Services account to import the custom model, which replicates it in Amazon Comprehend in their account.

", "StartDocumentClassificationJob": "

Starts an asynchronous document classification job. Use the DescribeDocumentClassificationJob operation to track the progress of the job.

", "StartDominantLanguageDetectionJob": "

Starts an asynchronous dominant language detection job for a collection of documents. Use the DescribeDominantLanguageDetectionJob operation to track the status of the job.

", "StartEntitiesDetectionJob": "

Starts an asynchronous entity detection job for a collection of documents. Use the DescribeEntitiesDetectionJob operation to track the status of the job.

This API can be used for either standard entity detection or custom entity recognition. To use it for custom entity recognition, provide the optional EntityRecognizerArn, which grants access to the recognizer being used to detect the custom entity.

", @@ -330,27 +330,27 @@ "ComprehendArn": { "base": null, "refs": { - "DocumentClassificationJobProperties$JobArn": "

The Amazon Resource Name (ARN) of the document classification job. It is a unique, fully qualified identifier for the job. It includes the AWS account, Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:document-classification-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:document-classification-job/1234abcd12ab34cd56ef1234567890ab

", - "DominantLanguageDetectionJobProperties$JobArn": "

The Amazon Resource Name (ARN) of the dominant language detection job. It is a unique, fully qualified identifier for the job. It includes the AWS account, Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:dominant-language-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:dominant-language-detection-job/1234abcd12ab34cd56ef1234567890ab

", - "EntitiesDetectionJobProperties$JobArn": "

The Amazon Resource Name (ARN) of the entities detection job. It is a unique, fully qualified identifier for the job. It includes the AWS account, Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:entities-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:entities-detection-job/1234abcd12ab34cd56ef1234567890ab

", - "EventsDetectionJobProperties$JobArn": "

The Amazon Resource Name (ARN) of the events detection job. It is a unique, fully qualified identifier for the job. It includes the AWS account, Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:events-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:events-detection-job/1234abcd12ab34cd56ef1234567890ab

", - "KeyPhrasesDetectionJobProperties$JobArn": "

The Amazon Resource Name (ARN) of the key phrases detection job. It is a unique, fully qualified identifier for the job. It includes the AWS account, Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:key-phrases-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:key-phrases-detection-job/1234abcd12ab34cd56ef1234567890ab

", + "DocumentClassificationJobProperties$JobArn": "

The Amazon Resource Name (ARN) of the document classification job. It is a unique, fully qualified identifier for the job. It includes the Amazon Web Services account, Amazon Web Services Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:document-classification-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:document-classification-job/1234abcd12ab34cd56ef1234567890ab

", + "DominantLanguageDetectionJobProperties$JobArn": "

The Amazon Resource Name (ARN) of the dominant language detection job. It is a unique, fully qualified identifier for the job. It includes the Amazon Web Services account, Amazon Web Services Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:dominant-language-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:dominant-language-detection-job/1234abcd12ab34cd56ef1234567890ab

", + "EntitiesDetectionJobProperties$JobArn": "

The Amazon Resource Name (ARN) of the entities detection job. It is a unique, fully qualified identifier for the job. It includes the Amazon Web Services account, Amazon Web Services Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:entities-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:entities-detection-job/1234abcd12ab34cd56ef1234567890ab

", + "EventsDetectionJobProperties$JobArn": "

The Amazon Resource Name (ARN) of the events detection job. It is a unique, fully qualified identifier for the job. It includes the Amazon Web Services account, Amazon Web Services Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:events-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:events-detection-job/1234abcd12ab34cd56ef1234567890ab

", + "KeyPhrasesDetectionJobProperties$JobArn": "

The Amazon Resource Name (ARN) of the key phrases detection job. It is a unique, fully qualified identifier for the job. It includes the Amazon Web Services account, Amazon Web Services Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:key-phrases-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:key-phrases-detection-job/1234abcd12ab34cd56ef1234567890ab

", "ListTagsForResourceRequest$ResourceArn": "

The Amazon Resource Name (ARN) of the given Amazon Comprehend resource you are querying.

", "ListTagsForResourceResponse$ResourceArn": "

The Amazon Resource Name (ARN) of the given Amazon Comprehend resource you are querying.

", - "PiiEntitiesDetectionJobProperties$JobArn": "

The Amazon Resource Name (ARN) of the PII entities detection job. It is a unique, fully qualified identifier for the job. It includes the AWS account, Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:pii-entities-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:pii-entities-detection-job/1234abcd12ab34cd56ef1234567890ab

", - "SentimentDetectionJobProperties$JobArn": "

The Amazon Resource Name (ARN) of the sentiment detection job. It is a unique, fully qualified identifier for the job. It includes the AWS account, Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:sentiment-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:sentiment-detection-job/1234abcd12ab34cd56ef1234567890ab

", - "StartDocumentClassificationJobResponse$JobArn": "

The Amazon Resource Name (ARN) of the document classification job. It is a unique, fully qualified identifier for the job. It includes the AWS account, Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:document-classification-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:document-classification-job/1234abcd12ab34cd56ef1234567890ab

", - "StartDominantLanguageDetectionJobResponse$JobArn": "

The Amazon Resource Name (ARN) of the dominant language detection job. It is a unique, fully qualified identifier for the job. It includes the AWS account, Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:dominant-language-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:dominant-language-detection-job/1234abcd12ab34cd56ef1234567890ab

", - "StartEntitiesDetectionJobResponse$JobArn": "

The Amazon Resource Name (ARN) of the entities detection job. It is a unique, fully qualified identifier for the job. It includes the AWS account, Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:entities-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:entities-detection-job/1234abcd12ab34cd56ef1234567890ab

", - "StartEventsDetectionJobResponse$JobArn": "

The Amazon Resource Name (ARN) of the events detection job. It is a unique, fully qualified identifier for the job. It includes the AWS account, Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:events-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:events-detection-job/1234abcd12ab34cd56ef1234567890ab

", - "StartKeyPhrasesDetectionJobResponse$JobArn": "

The Amazon Resource Name (ARN) of the key phrase detection job. It is a unique, fully qualified identifier for the job. It includes the AWS account, Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:key-phrases-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:key-phrases-detection-job/1234abcd12ab34cd56ef1234567890ab

", - "StartPiiEntitiesDetectionJobResponse$JobArn": "

The Amazon Resource Name (ARN) of the PII entity detection job. It is a unique, fully qualified identifier for the job. It includes the AWS account, Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:pii-entities-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:pii-entities-detection-job/1234abcd12ab34cd56ef1234567890ab

", - "StartSentimentDetectionJobResponse$JobArn": "

The Amazon Resource Name (ARN) of the sentiment detection job. It is a unique, fully qualified identifier for the job. It includes the AWS account, Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:sentiment-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:sentiment-detection-job/1234abcd12ab34cd56ef1234567890ab

", - "StartTargetedSentimentDetectionJobResponse$JobArn": "

The Amazon Resource Name (ARN) of the targeted sentiment detection job. It is a unique, fully qualified identifier for the job. It includes the AWS account, Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:targeted-sentiment-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:targeted-sentiment-detection-job/1234abcd12ab34cd56ef1234567890ab

", - "StartTopicsDetectionJobResponse$JobArn": "

The Amazon Resource Name (ARN) of the topics detection job. It is a unique, fully qualified identifier for the job. It includes the AWS account, Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:topics-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:document-classification-job/1234abcd12ab34cd56ef1234567890ab

", + "PiiEntitiesDetectionJobProperties$JobArn": "

The Amazon Resource Name (ARN) of the PII entities detection job. It is a unique, fully qualified identifier for the job. It includes the Amazon Web Services account, Amazon Web Services Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:pii-entities-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:pii-entities-detection-job/1234abcd12ab34cd56ef1234567890ab

", + "SentimentDetectionJobProperties$JobArn": "

The Amazon Resource Name (ARN) of the sentiment detection job. It is a unique, fully qualified identifier for the job. It includes the Amazon Web Services account, Amazon Web Services Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:sentiment-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:sentiment-detection-job/1234abcd12ab34cd56ef1234567890ab

", + "StartDocumentClassificationJobResponse$JobArn": "

The Amazon Resource Name (ARN) of the document classification job. It is a unique, fully qualified identifier for the job. It includes the Amazon Web Services account, Amazon Web Services Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:document-classification-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:document-classification-job/1234abcd12ab34cd56ef1234567890ab

", + "StartDominantLanguageDetectionJobResponse$JobArn": "

The Amazon Resource Name (ARN) of the dominant language detection job. It is a unique, fully qualified identifier for the job. It includes the Amazon Web Services account, Amazon Web Services Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:dominant-language-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:dominant-language-detection-job/1234abcd12ab34cd56ef1234567890ab

", + "StartEntitiesDetectionJobResponse$JobArn": "

The Amazon Resource Name (ARN) of the entities detection job. It is a unique, fully qualified identifier for the job. It includes the Amazon Web Services account, Amazon Web Services Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:entities-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:entities-detection-job/1234abcd12ab34cd56ef1234567890ab

", + "StartEventsDetectionJobResponse$JobArn": "

The Amazon Resource Name (ARN) of the events detection job. It is a unique, fully qualified identifier for the job. It includes the Amazon Web Services account, Amazon Web Services Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:events-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:events-detection-job/1234abcd12ab34cd56ef1234567890ab

", + "StartKeyPhrasesDetectionJobResponse$JobArn": "

The Amazon Resource Name (ARN) of the key phrase detection job. It is a unique, fully qualified identifier for the job. It includes the Amazon Web Services account, Amazon Web Services Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:key-phrases-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:key-phrases-detection-job/1234abcd12ab34cd56ef1234567890ab

", + "StartPiiEntitiesDetectionJobResponse$JobArn": "

The Amazon Resource Name (ARN) of the PII entity detection job. It is a unique, fully qualified identifier for the job. It includes the Amazon Web Services account, Amazon Web Services Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:pii-entities-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:pii-entities-detection-job/1234abcd12ab34cd56ef1234567890ab

", + "StartSentimentDetectionJobResponse$JobArn": "

The Amazon Resource Name (ARN) of the sentiment detection job. It is a unique, fully qualified identifier for the job. It includes the Amazon Web Services account, Amazon Web Services Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:sentiment-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:sentiment-detection-job/1234abcd12ab34cd56ef1234567890ab

", + "StartTargetedSentimentDetectionJobResponse$JobArn": "

The Amazon Resource Name (ARN) of the targeted sentiment detection job. It is a unique, fully qualified identifier for the job. It includes the Amazon Web Services account, Amazon Web Services Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:targeted-sentiment-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:targeted-sentiment-detection-job/1234abcd12ab34cd56ef1234567890ab

", + "StartTopicsDetectionJobResponse$JobArn": "

The Amazon Resource Name (ARN) of the topics detection job. It is a unique, fully qualified identifier for the job. It includes the Amazon Web Services account, Amazon Web Services Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:topics-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:topics-detection-job/1234abcd12ab34cd56ef1234567890ab

", "TagResourceRequest$ResourceArn": "

The Amazon Resource Name (ARN) of the given Amazon Comprehend resource to which you want to associate the tags.

", - "TargetedSentimentDetectionJobProperties$JobArn": "

The Amazon Resource Name (ARN) of the targeted sentiment detection job. It is a unique, fully qualified identifier for the job. It includes the AWS account, Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:targeted-sentiment-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:targeted-sentiment-detection-job/1234abcd12ab34cd56ef1234567890ab

", - "TopicsDetectionJobProperties$JobArn": "

The Amazon Resource Name (ARN) of the topics detection job. It is a unique, fully qualified identifier for the job. It includes the AWS account, Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:topics-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:topics-detection-job/1234abcd12ab34cd56ef1234567890ab

", + "TargetedSentimentDetectionJobProperties$JobArn": "

The Amazon Resource Name (ARN) of the targeted sentiment detection job. It is a unique, fully qualified identifier for the job. It includes the Amazon Web Services account, Amazon Web Services Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:targeted-sentiment-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:targeted-sentiment-detection-job/1234abcd12ab34cd56ef1234567890ab

", + "TopicsDetectionJobProperties$JobArn": "

The Amazon Resource Name (ARN) of the topics detection job. It is a unique, fully qualified identifier for the job. It includes the Amazon Web Services account, Amazon Web Services Region, and the job ID. The format of the ARN is as follows:

arn:<partition>:comprehend:<region>:<account-id>:topics-detection-job/<job-id>

The following is an example job ARN:

arn:aws:comprehend:us-west-2:111122223333:topics-detection-job/1234abcd12ab34cd56ef1234567890ab

", "UntagResourceRequest$ResourceArn": "

The Amazon Resource Name (ARN) of the given Amazon Comprehend resource from which you want to remove the tags.

" } }, @@ -359,7 +359,7 @@ "refs": { "CreateDatasetRequest$DatasetName": "

Name of the dataset.

", "CreateDocumentClassifierRequest$DocumentClassifierName": "

The name of the document classifier.

", - "CreateEntityRecognizerRequest$RecognizerName": "

The name given to the newly created recognizer. Recognizer names can be a maximum of 256 characters. Alphanumeric characters, hyphens (-) and underscores (_) are allowed. The name must be unique in the account/region.

", + "CreateEntityRecognizerRequest$RecognizerName": "

The name given to the newly created recognizer. Recognizer names can be a maximum of 256 characters. Alphanumeric characters, hyphens (-) and underscores (_) are allowed. The name must be unique in the account/Region.

", "CreateFlywheelRequest$FlywheelName": "

Name for the flywheel.

", "DatasetProperties$DatasetName": "

The name of the dataset.

", "DocumentClassifierFilter$DocumentClassifierName": "

The name that you assigned to the document classifier.

", @@ -405,6 +405,7 @@ "DocumentClassificationJobProperties$FlywheelArn": "

The Amazon Resource Name (ARN) of the flywheel.

", "DocumentClassifierProperties$FlywheelArn": "

The Amazon Resource Name (ARN) of the flywheel.

", "EndpointProperties$FlywheelArn": "

The Amazon Resource Name (ARN) of the flywheel.

", + "EntitiesDetectionJobProperties$FlywheelArn": "

The Amazon Resource Name (ARN) of the flywheel associated with this job.

", "EntityRecognizerProperties$FlywheelArn": "

The Amazon Resource Name (ARN) of the flywheel.

", "FlywheelIterationProperties$FlywheelArn": "

", "FlywheelProperties$FlywheelArn": "

The Amazon Resource Name (ARN) of the flywheel.

", @@ -516,7 +517,7 @@ "DetectDominantLanguageRequest$Text": "

A UTF-8 text string. The string must contain at least 20 characters. The maximum string size is 100 KB.

", "DetectEntitiesRequest$Text": "

A UTF-8 text string. The maximum string size is 100 KB. If you enter text using this parameter, do not use the Bytes parameter.

", "DetectKeyPhrasesRequest$Text": "

A UTF-8 text string. The string must contain less than 100 KB of UTF-8 encoded characters.

", - "DetectSentimentRequest$Text": "

A UTF-8 text string. The maximum string size is 5 KB.

Amazon Comprehend performs real-time sentiment analysis on the first 500 characters of the input text and ignores any additional text in the input.

", + "DetectSentimentRequest$Text": "

A UTF-8 text string. The maximum string size is 5 KB.

", "DetectSyntaxRequest$Text": "

A UTF-8 string. The maximum string size is 5 KB.

", "DetectTargetedSentimentRequest$Text": "

A UTF-8 text string. The maximum string length is 5 KB.

" } @@ -527,7 +528,7 @@ "BatchDetectDominantLanguageRequest$TextList": "

A list containing the UTF-8 encoded text of the input documents. The list can contain a maximum of 25 documents. Each document should contain at least 20 characters. The maximum size of each document is 5 KB.

", "BatchDetectEntitiesRequest$TextList": "

A list containing the UTF-8 encoded text of the input documents. The list can contain a maximum of 25 documents. The maximum size of each document is 5 KB.

", "BatchDetectKeyPhrasesRequest$TextList": "

A list containing the UTF-8 encoded text of the input documents. The list can contain a maximum of 25 documents. The maximum size of each document is 5 KB.

", - "BatchDetectSentimentRequest$TextList": "

A list containing the UTF-8 encoded text of the input documents. The list can contain a maximum of 25 documents. The maximum size of each document is 5 KB.

Amazon Comprehend performs real-time sentiment analysis on the first 500 characters of the input text and ignores any additional text in the input.

", + "BatchDetectSentimentRequest$TextList": "

A list containing the UTF-8 encoded text of the input documents. The list can contain a maximum of 25 documents. The maximum size of each document is 5 KB.

", "BatchDetectSyntaxRequest$TextList": "

A list containing the UTF-8 encoded text of the input documents. The list can contain a maximum of 25 documents. The maximum size for each document is 5 KB.

", "BatchDetectTargetedSentimentRequest$TextList": "

A list containing the UTF-8 encoded text of the input documents. The list can contain a maximum of 25 documents. The maximum size of each document is 5 KB.

" } @@ -953,7 +954,7 @@ "DescribeDocumentClassifierRequest$DocumentClassifierArn": "

The Amazon Resource Name (ARN) that identifies the document classifier. The CreateDocumentClassifier operation returns this identifier in its response.

", "DocumentClassificationJobProperties$DocumentClassifierArn": "

The Amazon Resource Name (ARN) that identifies the document classifier.

", "DocumentClassifierProperties$DocumentClassifierArn": "

The Amazon Resource Name (ARN) that identifies the document classifier.

", - "DocumentClassifierProperties$SourceModelArn": "

The Amazon Resource Name (ARN) of the source model. This model was imported from a different AWS account to create the document classifier model in your AWS account.

", + "DocumentClassifierProperties$SourceModelArn": "

The Amazon Resource Name (ARN) of the source model. This model was imported from a different Amazon Web Services account to create the document classifier model in your Amazon Web Services account.

", "StartDocumentClassificationJobRequest$DocumentClassifierArn": "

The Amazon Resource Name (ARN) of the document classifier to use to process the job.

", "StartDocumentClassificationJobResponse$DocumentClassifierArn": "

The ARN of the custom classification model.

", "StopTrainingDocumentClassifierRequest$DocumentClassifierArn": "

The Amazon Resource Name (ARN) that identifies the document classifier currently being trained.

" @@ -1206,7 +1207,7 @@ "DescribeEntityRecognizerRequest$EntityRecognizerArn": "

The Amazon Resource Name (ARN) that identifies the entity recognizer.

", "EntitiesDetectionJobProperties$EntityRecognizerArn": "

The Amazon Resource Name (ARN) that identifies the entity recognizer.

", "EntityRecognizerProperties$EntityRecognizerArn": "

The Amazon Resource Name (ARN) that identifies the entity recognizer.

", - "EntityRecognizerProperties$SourceModelArn": "

The Amazon Resource Name (ARN) of the source model. This model was imported from a different AWS account to create the entity recognizer model in your AWS account.

", + "EntityRecognizerProperties$SourceModelArn": "

The Amazon Resource Name (ARN) of the source model. This model was imported from a different Amazon Web Services account to create the entity recognizer model in your Amazon Web Services account.

", "StartEntitiesDetectionJobRequest$EntityRecognizerArn": "

The Amazon Resource Name (ARN) that identifies the specific entity recognizer to be used by the StartEntitiesDetectionJob. This ARN is optional and is only used for a custom entity recognition job.

", "StartEntitiesDetectionJobResponse$EntityRecognizerArn": "

The ARN of the custom entity recognition model.

", "StopTrainingEntityRecognizerRequest$EntityRecognizerArn": "

The Amazon Resource Name (ARN) that identifies the entity recognizer currently being trained.

" @@ -1257,7 +1258,7 @@ "EntityRecognizerInputDataConfig": { "base": "

Specifies the format and location of the input data.

", "refs": { - "CreateEntityRecognizerRequest$InputDataConfig": "

Specifies the format and location of the input data. The S3 bucket containing the input data must be located in the same region as the entity recognizer being created.

", + "CreateEntityRecognizerRequest$InputDataConfig": "

Specifies the format and location of the input data. The S3 bucket containing the input data must be located in the same Region as the entity recognizer being created.

", "EntityRecognizerProperties$InputDataConfig": "

The input data properties of an entity recognizer.

" } }, @@ -1493,36 +1494,36 @@ "IamRoleArn": { "base": null, "refs": { - "CreateDocumentClassifierRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that grants Amazon Comprehend read access to your input data.

", - "CreateEndpointRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that grants Amazon Comprehend read access to trained custom models encrypted with a customer managed key (ModelKmsKeyId).

", - "CreateEntityRecognizerRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that grants Amazon Comprehend read access to your input data.

", - "CreateFlywheelRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that grants Amazon Comprehend the permissions required to access the flywheel data in the data lake.

", - "DocumentClassificationJobProperties$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that grants Amazon Comprehend read access to your input data.

", - "DocumentClassifierProperties$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that grants Amazon Comprehend read access to your input data.

", - "DominantLanguageDetectionJobProperties$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that grants Amazon Comprehend read access to your input data.

", - "EndpointProperties$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that grants Amazon Comprehend read access to trained custom models encrypted with a customer managed key (ModelKmsKeyId).

", + "CreateDocumentClassifierRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend read access to your input data.

", + "CreateEndpointRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend read access to trained custom models encrypted with a customer managed key (ModelKmsKeyId).

", + "CreateEntityRecognizerRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend read access to your input data.

", + "CreateFlywheelRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend the permissions required to access the flywheel data in the data lake.

", + "DocumentClassificationJobProperties$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend read access to your input data.

", + "DocumentClassifierProperties$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend read access to your input data.

", + "DominantLanguageDetectionJobProperties$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend read access to your input data.

", + "EndpointProperties$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend read access to trained custom models encrypted with a customer managed key (ModelKmsKeyId).

", "EndpointProperties$DesiredDataAccessRoleArn": "

Data access role ARN to use in case the new model is encrypted with a customer KMS key.

", - "EntitiesDetectionJobProperties$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that grants Amazon Comprehend read access to your input data.

", - "EntityRecognizerProperties$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that grants Amazon Comprehend read access to your input data.

", - "EventsDetectionJobProperties$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that grants Amazon Comprehend read access to your input data.

", - "FlywheelProperties$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that grants Amazon Comprehend permission to access the flywheel data.

", - "ImportModelRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that grants Amazon Comprehend permission to use Amazon Key Management Service (KMS) to encrypt or decrypt the custom model.

", - "KeyPhrasesDetectionJobProperties$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that grants Amazon Comprehend read access to your input data.

", - "PiiEntitiesDetectionJobProperties$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that grants Amazon Comprehend read access to your input data.

", - "SentimentDetectionJobProperties$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that grants Amazon Comprehend read access to your input data.

", - "StartDocumentClassificationJobRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that grants Amazon Comprehend read access to your input data.

", - "StartDominantLanguageDetectionJobRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that grants Amazon Comprehend read access to your input data. For more information, see https://docs.aws.amazon.com/comprehend/latest/dg/access-control-managing-permissions.html#auth-role-permissions.

", - "StartEntitiesDetectionJobRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that grants Amazon Comprehend read access to your input data. For more information, see https://docs.aws.amazon.com/comprehend/latest/dg/access-control-managing-permissions.html#auth-role-permissions.

", - "StartEventsDetectionJobRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that grants Amazon Comprehend read access to your input data.

", - "StartKeyPhrasesDetectionJobRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that grants Amazon Comprehend read access to your input data. For more information, see https://docs.aws.amazon.com/comprehend/latest/dg/access-control-managing-permissions.html#auth-role-permissions.

", - "StartPiiEntitiesDetectionJobRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that grants Amazon Comprehend read access to your input data.

", - "StartSentimentDetectionJobRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that grants Amazon Comprehend read access to your input data. For more information, see https://docs.aws.amazon.com/comprehend/latest/dg/access-control-managing-permissions.html#auth-role-permissions.

", - "StartTargetedSentimentDetectionJobRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that grants Amazon Comprehend read access to your input data. For more information, see Role-based permissions.

", - "StartTopicsDetectionJobRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that grants Amazon Comprehend read access to your input data. For more information, see https://docs.aws.amazon.com/comprehend/latest/dg/access-control-managing-permissions.html#auth-role-permissions.

", - "TargetedSentimentDetectionJobProperties$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that grants Amazon Comprehend read access to your input data.

", - "TopicsDetectionJobProperties$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that grants Amazon Comprehend read access to your job data.

", + "EntitiesDetectionJobProperties$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend read access to your input data.

", + "EntityRecognizerProperties$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend read access to your input data.

", + "EventsDetectionJobProperties$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend read access to your input data.

", + "FlywheelProperties$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend permission to access the flywheel data.

", + "ImportModelRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend permission to use Amazon Key Management Service (KMS) to encrypt or decrypt the custom model.

", + "KeyPhrasesDetectionJobProperties$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend read access to your input data.

", + "PiiEntitiesDetectionJobProperties$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend read access to your input data.

", + "SentimentDetectionJobProperties$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend read access to your input data.

", + "StartDocumentClassificationJobRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend read access to your input data.

", + "StartDominantLanguageDetectionJobRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend read access to your input data. For more information, see Role-based permissions.

", + "StartEntitiesDetectionJobRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend read access to your input data. For more information, see Role-based permissions.

", + "StartEventsDetectionJobRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend read access to your input data.

", + "StartKeyPhrasesDetectionJobRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend read access to your input data. For more information, see Role-based permissions.

", + "StartPiiEntitiesDetectionJobRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend read access to your input data.

", + "StartSentimentDetectionJobRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend read access to your input data. For more information, see Role-based permissions.

", + "StartTargetedSentimentDetectionJobRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend read access to your input data. For more information, see Role-based permissions.

", + "StartTopicsDetectionJobRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend read access to your input data. For more information, see Role-based permissions.

", + "TargetedSentimentDetectionJobProperties$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend read access to your input data.

", + "TopicsDetectionJobProperties$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend read access to your job data.

", "UpdateEndpointRequest$DesiredDataAccessRoleArn": "

Data access role ARN to use in case the new model is encrypted with a customer CMK.

", - "UpdateFlywheelRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that grants Amazon Comprehend permission to access the flywheel data.

" + "UpdateFlywheelRequest$DataAccessRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend permission to access the flywheel data.

" } }, "ImportModelRequest": { @@ -1803,36 +1804,36 @@ "KmsKeyId": { "base": null, "refs": { - "CreateDocumentClassifierRequest$VolumeKmsKeyId": "

ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", - "CreateDocumentClassifierRequest$ModelKmsKeyId": "

ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses to encrypt trained custom models. The ModelKmsKeyId can be either of the following formats:

", - "CreateEntityRecognizerRequest$VolumeKmsKeyId": "

ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", - "CreateEntityRecognizerRequest$ModelKmsKeyId": "

ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses to encrypt trained custom models. The ModelKmsKeyId can be either of the following formats:

", - "DataSecurityConfig$ModelKmsKeyId": "

ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses to encrypt trained custom models. The ModelKmsKeyId can be either of the following formats:

", + "CreateDocumentClassifierRequest$VolumeKmsKeyId": "

ID for the Amazon Web Services Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", + "CreateDocumentClassifierRequest$ModelKmsKeyId": "

ID for the KMS key that Amazon Comprehend uses to encrypt trained custom models. The ModelKmsKeyId can be either of the following formats:

", + "CreateEntityRecognizerRequest$VolumeKmsKeyId": "

ID for the Amazon Web Services Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", + "CreateEntityRecognizerRequest$ModelKmsKeyId": "

ID for the KMS key that Amazon Comprehend uses to encrypt trained custom models. The ModelKmsKeyId can be either of the following formats:

", + "DataSecurityConfig$ModelKmsKeyId": "

ID for the KMS key that Amazon Comprehend uses to encrypt trained custom models. The ModelKmsKeyId can be either of the following formats:

", "DataSecurityConfig$VolumeKmsKeyId": "

ID for the KMS key that Amazon Comprehend uses to encrypt the volume.

", "DataSecurityConfig$DataLakeKmsKeyId": "

ID for the KMS key that Amazon Comprehend uses to encrypt the data in the data lake.

", - "DocumentClassificationJobProperties$VolumeKmsKeyId": "

ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", - "DocumentClassifierOutputDataConfig$KmsKeyId": "

ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses to encrypt the output results from an analysis job. The KmsKeyId can be one of the following formats:

", - "DocumentClassifierProperties$VolumeKmsKeyId": "

ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", - "DocumentClassifierProperties$ModelKmsKeyId": "

ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses to encrypt trained custom models. The ModelKmsKeyId can be either of the following formats:

", - "DominantLanguageDetectionJobProperties$VolumeKmsKeyId": "

ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", - "EntitiesDetectionJobProperties$VolumeKmsKeyId": "

ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", - "EntityRecognizerProperties$VolumeKmsKeyId": "

ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", - "EntityRecognizerProperties$ModelKmsKeyId": "

ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses to encrypt trained custom models. The ModelKmsKeyId can be either of the following formats:

", - "ImportModelRequest$ModelKmsKeyId": "

ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses to encrypt trained custom models. The ModelKmsKeyId can be either of the following formats:

", - "KeyPhrasesDetectionJobProperties$VolumeKmsKeyId": "

ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", - "OutputDataConfig$KmsKeyId": "

ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses to encrypt the output results from an analysis job. The KmsKeyId can be one of the following formats:

", - "PiiOutputDataConfig$KmsKeyId": "

ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses to encrypt the output results from an analysis job.

", - "SentimentDetectionJobProperties$VolumeKmsKeyId": "

ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", - "StartDocumentClassificationJobRequest$VolumeKmsKeyId": "

ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", - "StartDominantLanguageDetectionJobRequest$VolumeKmsKeyId": "

ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", - "StartEntitiesDetectionJobRequest$VolumeKmsKeyId": "

ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", - "StartKeyPhrasesDetectionJobRequest$VolumeKmsKeyId": "

ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", - "StartSentimentDetectionJobRequest$VolumeKmsKeyId": "

ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", + "DocumentClassificationJobProperties$VolumeKmsKeyId": "

ID for the Amazon Web Services Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", + "DocumentClassifierOutputDataConfig$KmsKeyId": "

ID for the Amazon Web Services Key Management Service (KMS) key that Amazon Comprehend uses to encrypt the output results from an analysis job. The KmsKeyId can be one of the following formats:

", + "DocumentClassifierProperties$VolumeKmsKeyId": "

ID for the Amazon Web Services Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", + "DocumentClassifierProperties$ModelKmsKeyId": "

ID for the KMS key that Amazon Comprehend uses to encrypt trained custom models. The ModelKmsKeyId can be either of the following formats:

", + "DominantLanguageDetectionJobProperties$VolumeKmsKeyId": "

ID for the Amazon Web Services Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", + "EntitiesDetectionJobProperties$VolumeKmsKeyId": "

ID for the Amazon Web Services Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", + "EntityRecognizerProperties$VolumeKmsKeyId": "

ID for the Amazon Web Services Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", + "EntityRecognizerProperties$ModelKmsKeyId": "

ID for the KMS key that Amazon Comprehend uses to encrypt trained custom models. The ModelKmsKeyId can be either of the following formats:

", + "ImportModelRequest$ModelKmsKeyId": "

ID for the KMS key that Amazon Comprehend uses to encrypt trained custom models. The ModelKmsKeyId can be either of the following formats:

", + "KeyPhrasesDetectionJobProperties$VolumeKmsKeyId": "

ID for the KMS key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", + "OutputDataConfig$KmsKeyId": "

ID for the Amazon Web Services Key Management Service (KMS) key that Amazon Comprehend uses to encrypt the output results from an analysis job. The KmsKeyId can be one of the following formats:

", + "PiiOutputDataConfig$KmsKeyId": "

ID for the Amazon Web Services Key Management Service (KMS) key that Amazon Comprehend uses to encrypt the output results from an analysis job.

", + "SentimentDetectionJobProperties$VolumeKmsKeyId": "

ID for the Amazon Web Services Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", + "StartDocumentClassificationJobRequest$VolumeKmsKeyId": "

ID for the Amazon Web Services Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", + "StartDominantLanguageDetectionJobRequest$VolumeKmsKeyId": "

ID for the Amazon Web Services Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", + "StartEntitiesDetectionJobRequest$VolumeKmsKeyId": "

ID for the Amazon Web Services Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:
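The two accepted forms are a bare KMS key ID or the full key ARN. A minimal sketch of passing either form on a Comprehend job request with the Go SDK; the key, role, and bucket identifiers below are placeholders:

```go
package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/comprehend"
)

func main() {
	svc := comprehend.New(session.Must(session.NewSession()))

	// Either form is accepted for VolumeKmsKeyId (placeholder values):
	keyID := "1234abcd-12ab-34cd-56ef-1234567890ab"
	keyARN := "arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab"
	_ = keyARN // aws.String(keyARN) works equally well below

	out, err := svc.StartEntitiesDetectionJob(&comprehend.StartEntitiesDetectionJobInput{
		DataAccessRoleArn: aws.String("arn:aws:iam::111122223333:role/ComprehendDataAccess"), // placeholder
		LanguageCode:      aws.String("en"),
		InputDataConfig: &comprehend.InputDataConfig{
			S3Uri: aws.String("s3://bucketName/prefix"),
		},
		OutputDataConfig: &comprehend.OutputDataConfig{
			S3Uri: aws.String("s3://bucketName/results/"),
		},
		VolumeKmsKeyId: aws.String(keyID),
	})
	fmt.Println(out, err)
}
```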

", + "StartKeyPhrasesDetectionJobRequest$VolumeKmsKeyId": "

ID for the Amazon Web Services Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", + "StartSentimentDetectionJobRequest$VolumeKmsKeyId": "

ID for the Amazon Web Services Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", "StartTargetedSentimentDetectionJobRequest$VolumeKmsKeyId": "

ID for the KMS key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", - "StartTopicsDetectionJobRequest$VolumeKmsKeyId": "

ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", + "StartTopicsDetectionJobRequest$VolumeKmsKeyId": "

ID for the Amazon Web Services Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", "TargetedSentimentDetectionJobProperties$VolumeKmsKeyId": "

ID for the KMS key that Amazon Comprehend uses to encrypt the data on the storage volume attached to the ML compute instance(s) that process the targeted sentiment detection job. The VolumeKmsKeyId can be either of the following formats:

", - "TopicsDetectionJobProperties$VolumeKmsKeyId": "

ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", - "UpdateDataSecurityConfig$ModelKmsKeyId": "

ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses to encrypt trained custom models. The ModelKmsKeyId can be either of the following formats:

", + "TopicsDetectionJobProperties$VolumeKmsKeyId": "

ID for the Amazon Web Services Key Management Service (KMS) key that Amazon Comprehend uses to encrypt data on the storage volume attached to the ML compute instance(s) that process the analysis job. The VolumeKmsKeyId can be either of the following formats:

", + "UpdateDataSecurityConfig$ModelKmsKeyId": "

ID for the KMS key that Amazon Comprehend uses to encrypt trained custom models. The ModelKmsKeyId can be either of the following formats:

", "UpdateDataSecurityConfig$VolumeKmsKeyId": "

ID for the KMS key that Amazon Comprehend uses to encrypt the volume.

" } }, @@ -2105,7 +2106,7 @@ "base": null, "refs": { "BatchDetectDominantLanguageItemResult$Languages": "

One or more DominantLanguage objects describing the dominant languages in the document.

", - "DetectDominantLanguageResponse$Languages": "

The languages that Amazon Comprehend detected in the input text. For each language, the response returns the RFC 5646 language code and the level of confidence that Amazon Comprehend has in the accuracy of its inference. For more information about RFC 5646, see Tags for Identifying Languages on the IETF Tools web site.

" + "DetectDominantLanguageResponse$Languages": "

Array of languages that Amazon Comprehend detected in the input text. The array is sorted in descending order of the score (the dominant language is always the first element in the array).

For each language, the response returns the RFC 5646 language code and the level of confidence that Amazon Comprehend has in the accuracy of its inference. For more information about RFC 5646, see Tags for Identifying Languages on the IETF Tools web site.
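A sketch of reading that sorted array with the Go SDK; the sample text is arbitrary:

```go
package main

import (
	"fmt"
	"log"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/comprehend"
)

func main() {
	svc := comprehend.New(session.Must(session.NewSession()))

	out, err := svc.DetectDominantLanguage(&comprehend.DetectDominantLanguageInput{
		Text: aws.String("Il pleut aujourd'hui à Paris."),
	})
	if err != nil {
		log.Fatal(err)
	}
	// Languages is sorted by score, descending: index 0 is the dominant language.
	for _, l := range out.Languages {
		fmt.Printf("%s %.3f\n", aws.StringValue(l.LanguageCode), aws.Float64Value(l.Score))
	}
}
```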

" } }, "ListOfEntities": { @@ -2401,8 +2402,8 @@ "Policy": { "base": null, "refs": { - "CreateDocumentClassifierRequest$ModelPolicy": "

The resource-based policy to attach to your custom document classifier model. You can use this policy to allow another AWS account to import your custom model.

Provide your policy as a JSON body that you enter as a UTF-8 encoded string without line breaks. To provide valid JSON, enclose the attribute names and values in double quotes. If the JSON body is also enclosed in double quotes, then you must escape the double quotes that are inside the policy:

\"{\\\"attribute\\\": \\\"value\\\", \\\"attribute\\\": [\\\"value\\\"]}\"

To avoid escaping quotes, you can use single quotes to enclose the policy and double quotes to enclose the JSON names and values:

'{\"attribute\": \"value\", \"attribute\": [\"value\"]}'

", - "CreateEntityRecognizerRequest$ModelPolicy": "

The JSON resource-based policy to attach to your custom entity recognizer model. You can use this policy to allow another AWS account to import your custom model.

Provide your JSON as a UTF-8 encoded string without line breaks. To provide valid JSON for your policy, enclose the attribute names and values in double quotes. If the JSON body is also enclosed in double quotes, then you must escape the double quotes that are inside the policy:

\"{\\\"attribute\\\": \\\"value\\\", \\\"attribute\\\": [\\\"value\\\"]}\"

To avoid escaping quotes, you can use single quotes to enclose the policy and double quotes to enclose the JSON names and values:

'{\"attribute\": \"value\", \"attribute\": [\"value\"]}'

", + "CreateDocumentClassifierRequest$ModelPolicy": "

The resource-based policy to attach to your custom document classifier model. You can use this policy to allow another Amazon Web Services account to import your custom model.

Provide your policy as a JSON body that you enter as a UTF-8 encoded string without line breaks. To provide valid JSON, enclose the attribute names and values in double quotes. If the JSON body is also enclosed in double quotes, then you must escape the double quotes that are inside the policy:

\"{\\\"attribute\\\": \\\"value\\\", \\\"attribute\\\": [\\\"value\\\"]}\"

To avoid escaping quotes, you can use single quotes to enclose the policy and double quotes to enclose the JSON names and values:

'{\"attribute\": \"value\", \"attribute\": [\"value\"]}'

", + "CreateEntityRecognizerRequest$ModelPolicy": "

The JSON resource-based policy to attach to your custom entity recognizer model. You can use this policy to allow another Amazon Web Services account to import your custom model.

Provide your JSON as a UTF-8 encoded string without line breaks. To provide valid JSON for your policy, enclose the attribute names and values in double quotes. If the JSON body is also enclosed in double quotes, then you must escape the double quotes that are inside the policy:

\"{\\\"attribute\\\": \\\"value\\\", \\\"attribute\\\": [\\\"value\\\"]}\"

To avoid escaping quotes, you can use single quotes to enclose the policy and double quotes to enclose the JSON names and values:

'{\"attribute\": \"value\", \"attribute\": [\"value\"]}'

", "DescribeResourcePolicyResponse$ResourcePolicy": "

The JSON body of the resource-based policy.

", "PutResourcePolicyRequest$ResourcePolicy": "

The JSON resource-based policy to attach to your custom model. Provide your JSON as a UTF-8 encoded string without line breaks. To provide valid JSON for your policy, enclose the attribute names and values in double quotes. If the JSON body is also enclosed in double quotes, then you must escape the double quotes that are inside the policy:

\"{\\\"attribute\\\": \\\"value\\\", \\\"attribute\\\": [\\\"value\\\"]}\"

To avoid escaping quotes, you can use single quotes to enclose the policy and double quotes to enclose the JSON names and values:

'{\"attribute\": \"value\", \"attribute\": [\"value\"]}'
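In Go, a raw string literal avoids the manual escaping described above, since the SDK sends the policy as a single-line UTF-8 JSON body. A sketch using PutResourcePolicy, assuming placeholder account IDs and a placeholder model ARN:

```go
package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/comprehend"
)

func main() {
	svc := comprehend.New(session.Must(session.NewSession()))

	// Backquotes keep the JSON on one line with no escaped quotes.
	policy := `{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":{"AWS":"arn:aws:iam::444455556666:root"},"Action":"comprehend:ImportModel","Resource":"*"}]}`

	_, err := svc.PutResourcePolicy(&comprehend.PutResourcePolicyInput{
		ResourceArn:    aws.String("arn:aws:comprehend:us-west-2:111122223333:document-classifier/my-classifier/version/v1"), // placeholder
		ResourcePolicy: aws.String(policy),
	})
	fmt.Println(err)
}
```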

" } @@ -2480,26 +2481,26 @@ "DatasetAugmentedManifestsListItem$S3Uri": "

The Amazon S3 location of the augmented manifest file.

", "DatasetAugmentedManifestsListItem$AnnotationDataS3Uri": "

The S3 prefix to the annotation files that are referred in the augmented manifest file.

", "DatasetAugmentedManifestsListItem$SourceDocumentsS3Uri": "

The S3 prefix to the source files (PDFs) that are referred to in the augmented manifest file.

", - "DatasetDocumentClassifierInputDataConfig$S3Uri": "

The Amazon S3 URI for the input data. The S3 bucket must be in the same region as the API endpoint that you are calling. The URI can point to a single input file or it can provide the prefix for a collection of input files.

For example, if you use the URI S3://bucketName/prefix, if the prefix is a single file, Amazon Comprehend uses that file as input. If more than one file begins with the prefix, Amazon Comprehend uses all of them as input.

This parameter is required if you set DataFormat to COMPREHEND_CSV.

", - "DatasetEntityRecognizerAnnotations$S3Uri": "

Specifies the Amazon S3 location where the training documents for an entity recognizer are located. The URI must be in the same region as the API endpoint that you are calling.

", + "DatasetDocumentClassifierInputDataConfig$S3Uri": "

The Amazon S3 URI for the input data. The S3 bucket must be in the same Region as the API endpoint that you are calling. The URI can point to a single input file or it can provide the prefix for a collection of input files.

For example, if you use the URI S3://bucketName/prefix and the prefix is a single file, Amazon Comprehend uses that file as input. If more than one file begins with the prefix, Amazon Comprehend uses all of them as input.

This parameter is required if you set DataFormat to COMPREHEND_CSV.

", + "DatasetEntityRecognizerAnnotations$S3Uri": "

Specifies the Amazon S3 location where the training documents for an entity recognizer are located. The URI must be in the same Region as the API endpoint that you are calling.

", "DatasetEntityRecognizerDocuments$S3Uri": "

Specifies the Amazon S3 location where the documents for the dataset are located.

", "DatasetEntityRecognizerEntityList$S3Uri": "

Specifies the Amazon S3 location where the entity list is located.

", "DatasetProperties$DatasetS3Uri": "

The S3 URI where the dataset is stored.

", - "DocumentClassifierInputDataConfig$S3Uri": "

The Amazon S3 URI for the input data. The S3 bucket must be in the same region as the API endpoint that you are calling. The URI can point to a single input file or it can provide the prefix for a collection of input files.

For example, if you use the URI S3://bucketName/prefix, if the prefix is a single file, Amazon Comprehend uses that file as input. If more than one file begins with the prefix, Amazon Comprehend uses all of them as input.

This parameter is required if you set DataFormat to COMPREHEND_CSV.

", - "DocumentClassifierInputDataConfig$TestS3Uri": "

This specifies the Amazon S3 location where the test annotations for an entity recognizer are located. The URI must be in the same AWS Region as the API endpoint that you are calling.

", - "DocumentClassifierOutputDataConfig$S3Uri": "

When you use the OutputDataConfig object while creating a custom classifier, you specify the Amazon S3 location where you want to write the confusion matrix. The URI must be in the same region as the API endpoint that you are calling. The location is used as the prefix for the actual location of this output file.

When the custom classifier job is finished, the service creates the output file in a directory specific to the job. The S3Uri field contains the location of the output file, called output.tar.gz. It is a compressed archive that contains the confusion matrix.

", + "DocumentClassifierInputDataConfig$S3Uri": "

The Amazon S3 URI for the input data. The S3 bucket must be in the same Region as the API endpoint that you are calling. The URI can point to a single input file or it can provide the prefix for a collection of input files.

For example, if you use the URI S3://bucketName/prefix and the prefix is a single file, Amazon Comprehend uses that file as input. If more than one file begins with the prefix, Amazon Comprehend uses all of them as input.

This parameter is required if you set DataFormat to COMPREHEND_CSV.

", + "DocumentClassifierInputDataConfig$TestS3Uri": "

This specifies the Amazon S3 location where the test annotations for a document classifier are located. The URI must be in the same Amazon Web Services Region as the API endpoint that you are calling.

", + "DocumentClassifierOutputDataConfig$S3Uri": "

When you use the OutputDataConfig object while creating a custom classifier, you specify the Amazon S3 location where you want to write the confusion matrix. The URI must be in the same Region as the API endpoint that you are calling. The location is used as the prefix for the actual location of this output file.

When the custom classifier job is finished, the service creates the output file in a directory specific to the job. The S3Uri field contains the location of the output file, called output.tar.gz. It is a compressed archive that contains the confusion matrix.

", "DocumentClassifierOutputDataConfig$FlywheelStatsS3Prefix": "

The Amazon S3 prefix for the data lake location of the flywheel statistics.

", - "EntityRecognizerAnnotations$S3Uri": "

Specifies the Amazon S3 location where the annotations for an entity recognizer are located. The URI must be in the same region as the API endpoint that you are calling.

", - "EntityRecognizerAnnotations$TestS3Uri": "

Specifies the Amazon S3 location where the test annotations for an entity recognizer are located. The URI must be in the same region as the API endpoint that you are calling.

", - "EntityRecognizerDocuments$S3Uri": "

Specifies the Amazon S3 location where the training documents for an entity recognizer are located. The URI must be in the same region as the API endpoint that you are calling.

", - "EntityRecognizerDocuments$TestS3Uri": "

Specifies the Amazon S3 location where the test documents for an entity recognizer are located. The URI must be in the same AWS Region as the API endpoint that you are calling.

", - "EntityRecognizerEntityList$S3Uri": "

Specifies the Amazon S3 location where the entity list is located. The URI must be in the same region as the API endpoint that you are calling.

", + "EntityRecognizerAnnotations$S3Uri": "

Specifies the Amazon S3 location where the annotations for an entity recognizer are located. The URI must be in the same Region as the API endpoint that you are calling.

", + "EntityRecognizerAnnotations$TestS3Uri": "

Specifies the Amazon S3 location where the test annotations for an entity recognizer are located. The URI must be in the same Region as the API endpoint that you are calling.

", + "EntityRecognizerDocuments$S3Uri": "

Specifies the Amazon S3 location where the training documents for an entity recognizer are located. The URI must be in the same Region as the API endpoint that you are calling.

", + "EntityRecognizerDocuments$TestS3Uri": "

Specifies the Amazon S3 location where the test documents for an entity recognizer are located. The URI must be in the same Amazon Web Services Region as the API endpoint that you are calling.

", + "EntityRecognizerEntityList$S3Uri": "

Specifies the Amazon S3 location where the entity list is located. The URI must be in the same Region as the API endpoint that you are calling.

", "EntityRecognizerOutputDataConfig$FlywheelStatsS3Prefix": "

The Amazon S3 prefix for the data lake location of the flywheel statistics.

", "FlywheelIterationProperties$EvaluationManifestS3Prefix": "

", "FlywheelProperties$DataLakeS3Uri": "

Amazon S3 URI of the data lake location.

", "FlywheelSummary$DataLakeS3Uri": "

Amazon S3 URI of the data lake location.

", - "InputDataConfig$S3Uri": "

The Amazon S3 URI for the input data. The URI must be in same region as the API endpoint that you are calling. The URI can point to a single input file or it can provide the prefix for a collection of data files.

For example, if you use the URI S3://bucketName/prefix, if the prefix is a single file, Amazon Comprehend uses that file as input. If more than one file begins with the prefix, Amazon Comprehend uses all of them as input.

", - "OutputDataConfig$S3Uri": "

When you use the OutputDataConfig object with asynchronous operations, you specify the Amazon S3 location where you want to write the output data. The URI must be in the same region as the API endpoint that you are calling. The location is used as the prefix for the actual location of the output file.

When the topic detection job is finished, the service creates an output file in a directory specific to the job. The S3Uri field contains the location of the output file, called output.tar.gz. It is a compressed archive that contains the ouput of the operation.

For a PII entity detection job, the output file is plain text, not a compressed archive. The output file name is the same as the input file, with .out appended at the end.

", + "InputDataConfig$S3Uri": "

The Amazon S3 URI for the input data. The URI must be in the same Region as the API endpoint that you are calling. The URI can point to a single input file or it can provide the prefix for a collection of data files.

For example, if you use the URI S3://bucketName/prefix and the prefix is a single file, Amazon Comprehend uses that file as input. If more than one file begins with the prefix, Amazon Comprehend uses all of them as input.

", + "OutputDataConfig$S3Uri": "

When you use the OutputDataConfig object with asynchronous operations, you specify the Amazon S3 location where you want to write the output data. The URI must be in the same Region as the API endpoint that you are calling. The location is used as the prefix for the actual location of the output file.

When the topic detection job is finished, the service creates an output file in a directory specific to the job. The S3Uri field contains the location of the output file, called output.tar.gz. It is a compressed archive that contains the output of the operation.

For a PII entity detection job, the output file is plain text, not a compressed archive. The output file name is the same as the input file, with .out appended at the end.
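A sketch tying the input prefix and output prefix together for a topics detection job; the bucket names and role ARN are placeholders:

```go
package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/comprehend"
)

func main() {
	svc := comprehend.New(session.Must(session.NewSession()))

	out, err := svc.StartTopicsDetectionJob(&comprehend.StartTopicsDetectionJobInput{
		DataAccessRoleArn: aws.String("arn:aws:iam::111122223333:role/ComprehendDataAccess"), // placeholder
		InputDataConfig: &comprehend.InputDataConfig{
			// Prefix form: every object under the prefix is used as input.
			S3Uri:       aws.String("s3://bucketName/prefix"),
			InputFormat: aws.String(comprehend.InputFormatOneDocPerLine),
		},
		OutputDataConfig: &comprehend.OutputDataConfig{
			// Used as a prefix; the finished job writes output.tar.gz
			// in a job-specific directory under this location.
			S3Uri: aws.String("s3://bucketName/results/"),
		},
	})
	fmt.Println(out, err)
}
```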

", "PiiOutputDataConfig$S3Uri": "

When you use the PiiOutputDataConfig object with asynchronous operations, you specify the Amazon S3 location where you want to write the output data.

For a PII entity detection job, the output file is plain text, not a compressed archive. The output file name is the same as the input file, with .out appended at the end.

" } }, @@ -2840,7 +2841,7 @@ "Subnets": { "base": null, "refs": { - "VpcConfig$Subnets": "

The ID for each subnet being used in your private VPC. This subnet is a subset of the a range of IPv4 addresses used by the VPC and is specific to a given availability zone in the VPC’s region. This ID number is preceded by \"subnet-\", for instance: \"subnet-04ccf456919e69055\". For more information, see VPCs and Subnets.

" + "VpcConfig$Subnets": "

The ID for each subnet being used in your private VPC. This subnet is a subset of the range of IPv4 addresses used by the VPC and is specific to a given Availability Zone in the VPC’s Region. This ID number is preceded by \"subnet-\", for instance: \"subnet-04ccf456919e69055\". For more information, see VPCs and Subnets.
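A sketch of the corresponding VpcConfig structure; the subnet ID is the example from the text above, and the security group ID is a placeholder:

```go
package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/service/comprehend"
)

func main() {
	// Both fields are lists of IDs.
	vpc := &comprehend.VpcConfig{
		Subnets:          []*string{aws.String("subnet-04ccf456919e69055")},
		SecurityGroupIds: []*string{aws.String("sg-0123456789abcdef0")}, // placeholder
	}
	fmt.Println(vpc)
}
```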

" } }, "SyntaxLanguageCode": { @@ -3122,13 +3123,13 @@ "VersionName": { "base": null, "refs": { - "CreateDocumentClassifierRequest$VersionName": "

The version name given to the newly created classifier. Version names can have a maximum of 256 characters. Alphanumeric characters, hyphens (-) and underscores (_) are allowed. The version name must be unique among all models with the same classifier name in the account/AWS Region.

", - "CreateEntityRecognizerRequest$VersionName": "

The version name given to the newly created recognizer. Version names can be a maximum of 256 characters. Alphanumeric characters, hyphens (-) and underscores (_) are allowed. The version name must be unique among all models with the same recognizer name in the account/ AWS Region.

", + "CreateDocumentClassifierRequest$VersionName": "

The version name given to the newly created classifier. Version names can have a maximum of 256 characters. Alphanumeric characters, hyphens (-) and underscores (_) are allowed. The version name must be unique among all models with the same classifier name in the Amazon Web Services account/Amazon Web Services Region.

", + "CreateEntityRecognizerRequest$VersionName": "

The version name given to the newly created recognizer. Version names can be a maximum of 256 characters. Alphanumeric characters, hyphens (-) and underscores (_) are allowed. The version name must be unique among all models with the same recognizer name in the account/Region.

", "DocumentClassifierProperties$VersionName": "

The version name that you assigned to the document classifier.

", "DocumentClassifierSummary$LatestVersionName": "

The version name you assigned to the latest document classifier version.

", "EntityRecognizerProperties$VersionName": "

The version name you assigned to the entity recognizer.

", "EntityRecognizerSummary$LatestVersionName": "

The version name you assigned to the latest entity recognizer version.

", - "ImportModelRequest$VersionName": "

The version name given to the custom model that is created by this import. Version names can have a maximum of 256 characters. Alphanumeric characters, hyphens (-) and underscores (_) are allowed. The version name must be unique among all models with the same classifier name in the account/AWS Region.

" + "ImportModelRequest$VersionName": "

The version name given to the custom model that is created by this import. Version names can have a maximum of 256 characters. Alphanumeric characters, hyphens (-) and underscores (_) are allowed. The version name must be unique among all models with the same classifier name in the account/Region.

" } }, "VpcConfig": { diff --git a/models/apis/comprehend/2017-11-27/endpoint-tests-1.json b/models/apis/comprehend/2017-11-27/endpoint-tests-1.json index e75ad3540f7..a63e830fa03 100644 --- a/models/apis/comprehend/2017-11-27/endpoint-tests-1.json +++ b/models/apis/comprehend/2017-11-27/endpoint-tests-1.json @@ -8,9 +8,9 @@ } }, "params": { - "Region": "ap-northeast-1", "UseFIPS": false, - "UseDualStack": false + "UseDualStack": false, + "Region": "ap-northeast-1" } }, { @@ -21,9 +21,9 @@ } }, "params": { - "Region": "ap-northeast-2", "UseFIPS": false, - "UseDualStack": false + "UseDualStack": false, + "Region": "ap-northeast-2" } }, { @@ -34,9 +34,9 @@ } }, "params": { - "Region": "ap-south-1", "UseFIPS": false, - "UseDualStack": false + "UseDualStack": false, + "Region": "ap-south-1" } }, { @@ -47,9 +47,9 @@ } }, "params": { - "Region": "ap-southeast-1", "UseFIPS": false, - "UseDualStack": false + "UseDualStack": false, + "Region": "ap-southeast-1" } }, { @@ -60,9 +60,9 @@ } }, "params": { - "Region": "ap-southeast-2", "UseFIPS": false, - "UseDualStack": false + "UseDualStack": false, + "Region": "ap-southeast-2" } }, { @@ -73,9 +73,9 @@ } }, "params": { - "Region": "ca-central-1", "UseFIPS": false, - "UseDualStack": false + "UseDualStack": false, + "Region": "ca-central-1" } }, { @@ -86,9 +86,9 @@ } }, "params": { - "Region": "eu-central-1", "UseFIPS": false, - "UseDualStack": false + "UseDualStack": false, + "Region": "eu-central-1" } }, { @@ -99,9 +99,9 @@ } }, "params": { - "Region": "eu-west-1", "UseFIPS": false, - "UseDualStack": false + "UseDualStack": false, + "Region": "eu-west-1" } }, { @@ -112,9 +112,9 @@ } }, "params": { - "Region": "eu-west-2", "UseFIPS": false, - "UseDualStack": false + "UseDualStack": false, + "Region": "eu-west-2" } }, { @@ -125,9 +125,9 @@ } }, "params": { - "Region": "us-east-1", "UseFIPS": false, - "UseDualStack": false + "UseDualStack": false, + "Region": "us-east-1" } }, { @@ -138,9 +138,9 @@ } }, "params": { - "Region": "us-east-1", "UseFIPS": true, - "UseDualStack": false + "UseDualStack": false, + "Region": "us-east-1" } }, { @@ -151,9 +151,9 @@ } }, "params": { - "Region": "us-east-2", "UseFIPS": false, - "UseDualStack": false + "UseDualStack": false, + "Region": "us-east-2" } }, { @@ -164,9 +164,9 @@ } }, "params": { - "Region": "us-east-2", "UseFIPS": true, - "UseDualStack": false + "UseDualStack": false, + "Region": "us-east-2" } }, { @@ -177,9 +177,9 @@ } }, "params": { - "Region": "us-west-2", "UseFIPS": false, - "UseDualStack": false + "UseDualStack": false, + "Region": "us-west-2" } }, { @@ -190,9 +190,9 @@ } }, "params": { - "Region": "us-west-2", "UseFIPS": true, - "UseDualStack": false + "UseDualStack": false, + "Region": "us-west-2" } }, { @@ -203,9 +203,9 @@ } }, "params": { - "Region": "us-east-1", "UseFIPS": true, - "UseDualStack": true + "UseDualStack": true, + "Region": "us-east-1" } }, { @@ -216,9 +216,9 @@ } }, "params": { - "Region": "us-east-1", "UseFIPS": false, - "UseDualStack": true + "UseDualStack": true, + "Region": "us-east-1" } }, { @@ -229,9 +229,9 @@ } }, "params": { - "Region": "cn-north-1", "UseFIPS": true, - "UseDualStack": true + "UseDualStack": true, + "Region": "cn-north-1" } }, { @@ -242,9 +242,9 @@ } }, "params": { - "Region": "cn-north-1", "UseFIPS": true, - "UseDualStack": false + "UseDualStack": false, + "Region": "cn-north-1" } }, { @@ -255,9 +255,9 @@ } }, "params": { - "Region": "cn-north-1", "UseFIPS": false, - "UseDualStack": true + "UseDualStack": true, + "Region": "cn-north-1" } }, { @@ -268,9 
+268,9 @@ } }, "params": { - "Region": "cn-north-1", "UseFIPS": false, - "UseDualStack": false + "UseDualStack": false, + "Region": "cn-north-1" } }, { @@ -281,9 +281,9 @@ } }, "params": { - "Region": "us-gov-west-1", "UseFIPS": false, - "UseDualStack": false + "UseDualStack": false, + "Region": "us-gov-west-1" } }, { @@ -294,9 +294,9 @@ } }, "params": { - "Region": "us-gov-west-1", "UseFIPS": true, - "UseDualStack": false + "UseDualStack": false, + "Region": "us-gov-west-1" } }, { @@ -307,9 +307,9 @@ } }, "params": { - "Region": "us-gov-east-1", "UseFIPS": true, - "UseDualStack": true + "UseDualStack": true, + "Region": "us-gov-east-1" } }, { @@ -320,9 +320,9 @@ } }, "params": { - "Region": "us-gov-east-1", "UseFIPS": true, - "UseDualStack": false + "UseDualStack": false, + "Region": "us-gov-east-1" } }, { @@ -333,9 +333,9 @@ } }, "params": { - "Region": "us-gov-east-1", "UseFIPS": false, - "UseDualStack": true + "UseDualStack": true, + "Region": "us-gov-east-1" } }, { @@ -346,9 +346,9 @@ } }, "params": { - "Region": "us-gov-east-1", "UseFIPS": false, - "UseDualStack": false + "UseDualStack": false, + "Region": "us-gov-east-1" } }, { @@ -359,9 +359,20 @@ } }, "params": { - "Region": "us-iso-east-1", "UseFIPS": false, - "UseDualStack": false + "UseDualStack": false, + "Region": "us-iso-east-1" + } + }, + { + "documentation": "For region us-iso-east-1 with FIPS enabled and DualStack enabled", + "expect": { + "error": "FIPS and DualStack are enabled, but this partition does not support one or both" + }, + "params": { + "UseFIPS": true, + "UseDualStack": true, + "Region": "us-iso-east-1" } }, { @@ -372,9 +383,31 @@ } }, "params": { - "Region": "us-iso-east-1", "UseFIPS": true, - "UseDualStack": false + "UseDualStack": false, + "Region": "us-iso-east-1" + } + }, + { + "documentation": "For region us-iso-east-1 with FIPS disabled and DualStack enabled", + "expect": { + "error": "DualStack is enabled but this partition does not support DualStack" + }, + "params": { + "UseFIPS": false, + "UseDualStack": true, + "Region": "us-iso-east-1" + } + }, + { + "documentation": "For region us-isob-east-1 with FIPS enabled and DualStack enabled", + "expect": { + "error": "FIPS and DualStack are enabled, but this partition does not support one or both" + }, + "params": { + "UseFIPS": true, + "UseDualStack": true, + "Region": "us-isob-east-1" } }, { @@ -385,9 +418,20 @@ } }, "params": { - "Region": "us-isob-east-1", "UseFIPS": true, - "UseDualStack": false + "UseDualStack": false, + "Region": "us-isob-east-1" + } + }, + { + "documentation": "For region us-isob-east-1 with FIPS disabled and DualStack enabled", + "expect": { + "error": "DualStack is enabled but this partition does not support DualStack" + }, + "params": { + "UseFIPS": false, + "UseDualStack": true, + "Region": "us-isob-east-1" } }, { @@ -398,9 +442,9 @@ } }, "params": { - "Region": "us-isob-east-1", "UseFIPS": false, - "UseDualStack": false + "UseDualStack": false, + "Region": "us-isob-east-1" } }, { @@ -411,9 +455,9 @@ } }, "params": { - "Region": "us-east-1", "UseFIPS": false, "UseDualStack": false, + "Region": "us-east-1", "Endpoint": "https://example.com" } }, @@ -436,9 +480,9 @@ "error": "Invalid Configuration: FIPS and custom endpoint are not supported" }, "params": { - "Region": "us-east-1", "UseFIPS": true, "UseDualStack": false, + "Region": "us-east-1", "Endpoint": "https://example.com" } }, @@ -448,11 +492,17 @@ "error": "Invalid Configuration: Dualstack and custom endpoint are not supported" }, "params": { - "Region": "us-east-1", 
"UseFIPS": false, "UseDualStack": true, + "Region": "us-east-1", "Endpoint": "https://example.com" } + }, + { + "documentation": "Missing region", + "expect": { + "error": "Invalid Configuration: Missing Region" + } } ], "version": "1.0" diff --git a/models/apis/monitoring/2010-08-01/docs-2.json b/models/apis/monitoring/2010-08-01/docs-2.json index 83cce3ee46d..6121c7a7d10 100644 --- a/models/apis/monitoring/2010-08-01/docs-2.json +++ b/models/apis/monitoring/2010-08-01/docs-2.json @@ -2,7 +2,7 @@ "version": "2.0", "service": "

Amazon CloudWatch monitors your Amazon Web Services resources and the applications you run on Amazon Web Services in real time. You can use CloudWatch to collect and track metrics, which are the variables you want to measure for your resources and applications.

CloudWatch alarms send notifications or automatically change the resources you are monitoring based on rules that you define. For example, you can monitor the CPU usage and disk reads and writes of your Amazon EC2 instances. Then, use this data to determine whether you should launch additional instances to handle increased load. You can also use this data to stop under-used instances to save money.

In addition to monitoring the built-in metrics that come with Amazon Web Services, you can monitor your own custom metrics. With CloudWatch, you gain system-wide visibility into resource utilization, application performance, and operational health.

", "operations": { - "DeleteAlarms": "

Deletes the specified alarms. You can delete up to 100 alarms in one operation. However, this total can include no more than one composite alarm. For example, you could delete 99 metric alarms and one composite alarms with one operation, but you can't delete two composite alarms with one operation.

In the event of an error, no alarms are deleted.

It is possible to create a loop or cycle of composite alarms, where composite alarm A depends on composite alarm B, and composite alarm B also depends on composite alarm A. In this scenario, you can't delete any composite alarm that is part of the cycle because there is always still a composite alarm that depends on that alarm that you want to delete.

To get out of such a situation, you must break the cycle by changing the rule of one of the composite alarms in the cycle to remove a dependency that creates the cycle. The simplest change to make to break a cycle is to change the AlarmRule of one of the alarms to false.

Additionally, the evaluation of composite alarms stops if CloudWatch detects a cycle in the evaluation path.

", + "DeleteAlarms": "

Deletes the specified alarms. You can delete up to 100 alarms in one operation. However, this total can include no more than one composite alarm. For example, you could delete 99 metric alarms and one composite alarm with one operation, but you can't delete two composite alarms with one operation.

If you specify an incorrect alarm name or make any other error in the operation, no alarms are deleted. To confirm that alarms were deleted successfully, you can use the DescribeAlarms operation after using DeleteAlarms.

It is possible to create a loop or cycle of composite alarms, where composite alarm A depends on composite alarm B, and composite alarm B also depends on composite alarm A. In this scenario, you can't delete any composite alarm that is part of the cycle because there is always still a composite alarm that depends on the alarm you want to delete.

To get out of such a situation, you must break the cycle by changing the rule of one of the composite alarms in the cycle to remove a dependency that creates the cycle. The simplest change to make to break a cycle is to change the AlarmRule of one of the alarms to false.

Additionally, the evaluation of composite alarms stops if CloudWatch detects a cycle in the evaluation path.
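A sketch of the delete-then-verify pattern described above; the alarm names are placeholders:

```go
package main

import (
	"fmt"
	"log"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/cloudwatch"
)

func main() {
	svc := cloudwatch.New(session.Must(session.NewSession()))
	names := []*string{aws.String("cpu-high"), aws.String("disk-writes-high")} // placeholders

	// On any error, no alarms are deleted.
	if _, err := svc.DeleteAlarms(&cloudwatch.DeleteAlarmsInput{AlarmNames: names}); err != nil {
		log.Fatal(err)
	}

	// Confirm: a successful delete leaves nothing to describe.
	out, err := svc.DescribeAlarms(&cloudwatch.DescribeAlarmsInput{AlarmNames: names})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("remaining metric alarms:", len(out.MetricAlarms))
}
```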

", "DeleteAnomalyDetector": "

Deletes the specified anomaly detection model from your account. For more information about how to delete an anomaly detection model, see Deleting an anomaly detection model in the CloudWatch User Guide.

", "DeleteDashboards": "

Deletes all dashboards that you specify. You can specify up to 100 dashboards to delete. If there is an error during this call, no dashboards are deleted.

", "DeleteInsightRules": "

Permanently deletes the specified Contributor Insights rules.

If you create a rule, delete it, and then re-create it with the same name, historical data from the first time the rule was created might not be available.

", @@ -32,7 +32,7 @@ "PutDashboard": "

Creates a dashboard if it does not already exist, or updates an existing dashboard. If you update a dashboard, the entire contents are replaced with what you specify here.

All dashboards in your account are global, not region-specific.

A simple way to create a dashboard using PutDashboard is to copy an existing dashboard. To copy an existing dashboard using the console, you can load the dashboard and then use the View/edit source command in the Actions menu to display the JSON block for that dashboard. Another way to copy a dashboard is to use GetDashboard, and then use the data returned within DashboardBody as the template for the new dashboard when you call PutDashboard.

When you create a dashboard with PutDashboard, a good practice is to add a text widget at the top of the dashboard with a message that the dashboard was created by script and should not be changed in the console. This message could also point console users to the location of the DashboardBody script or the CloudFormation template used to create the dashboard.

", "PutInsightRule": "

Creates a Contributor Insights rule. Rules evaluate log events in a CloudWatch Logs log group, enabling you to find contributor data for the log events in that log group. For more information, see Using Contributor Insights to Analyze High-Cardinality Data.

If you create a rule, delete it, and then re-create it with the same name, historical data from the first time the rule was created might not be available.

", "PutManagedInsightRules": "

Creates a managed Contributor Insights rule for a specified Amazon Web Services resource. When you enable a managed rule, you create a Contributor Insights rule that collects data from Amazon Web Services services. You cannot edit these rules with PutInsightRule. The rules can be enabled, disabled, and deleted using EnableInsightRules, DisableInsightRules, and DeleteInsightRules. If a previously created managed rule is currently disabled, a subsequent call to this API will re-enable it. Use ListManagedInsightRules to describe all available rules.

", - "PutMetricAlarm": "

Creates or updates an alarm and associates it with the specified metric, metric math expression, anomaly detection model, or Metrics Insights query. For more information about using a Metrics Insights query for an alarm, see Create alarms on Metrics Insights queries.

Alarms based on anomaly detection models cannot have Auto Scaling actions.

When this operation creates an alarm, the alarm state is immediately set to INSUFFICIENT_DATA. The alarm is then evaluated and its state is set appropriately. Any actions associated with the new state are then executed.

When you update an existing alarm, its state is left unchanged, but the update completely overwrites the previous configuration of the alarm.

If you are an IAM user, you must have Amazon EC2 permissions for some alarm operations:

The first time you create an alarm in the Amazon Web Services Management Console, the CLI, or by using the PutMetricAlarm API, CloudWatch creates the necessary service-linked role for you. The service-linked roles are called AWSServiceRoleForCloudWatchEvents and AWSServiceRoleForCloudWatchAlarms_ActionSSM. For more information, see Amazon Web Services service-linked role.

Cross-account alarms

You can set an alarm on metrics in the current account, or in another account. To create a cross-account alarm that watches a metric in a different account, you must have completed the following pre-requisites:

", + "PutMetricAlarm": "

Creates or updates an alarm and associates it with the specified metric, metric math expression, anomaly detection model, or Metrics Insights query. For more information about using a Metrics Insights query for an alarm, see Create alarms on Metrics Insights queries.

Alarms based on anomaly detection models cannot have Auto Scaling actions.

When this operation creates an alarm, the alarm state is immediately set to INSUFFICIENT_DATA. The alarm is then evaluated and its state is set appropriately. Any actions associated with the new state are then executed.

When you update an existing alarm, its state is left unchanged, but the update completely overwrites the previous configuration of the alarm.
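A sketch of a minimal single-metric alarm; the instance ID and SNS topic ARN are placeholders:

```go
package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/cloudwatch"
)

func main() {
	svc := cloudwatch.New(session.Must(session.NewSession()))

	_, err := svc.PutMetricAlarm(&cloudwatch.PutMetricAlarmInput{
		AlarmName:  aws.String("cpu-high"), // must be unique within the Region
		Namespace:  aws.String("AWS/EC2"),
		MetricName: aws.String("CPUUtilization"),
		Dimensions: []*cloudwatch.Dimension{{
			Name:  aws.String("InstanceId"),
			Value: aws.String("i-1234567890abcdef0"), // placeholder
		}},
		Statistic:          aws.String(cloudwatch.StatisticAverage),
		Period:             aws.Int64(300),
		EvaluationPeriods:  aws.Int64(2),
		Threshold:          aws.Float64(80),
		ComparisonOperator: aws.String(cloudwatch.ComparisonOperatorGreaterThanThreshold),
		AlarmActions:       []*string{aws.String("arn:aws:sns:us-east-1:111122223333:my-topic")}, // placeholder
	})
	fmt.Println(err)
}
```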

If you are an IAM user, you must have Amazon EC2 permissions for some alarm operations:

The first time you create an alarm in the Amazon Web Services Management Console, the CLI, or by using the PutMetricAlarm API, CloudWatch creates the necessary service-linked role for you. The service-linked roles are called AWSServiceRoleForCloudWatchEvents and AWSServiceRoleForCloudWatchAlarms_ActionSSM. For more information, see Amazon Web Services service-linked role.

Cross-account alarms

You can set an alarm on metrics in the current account, or in another account. To create a cross-account alarm that watches a metric in a different account, you must have completed the following pre-requisites:

", "PutMetricData": "

Publishes metric data points to Amazon CloudWatch. CloudWatch associates the data points with the specified metric. If the specified metric does not exist, CloudWatch creates the metric. When CloudWatch creates a metric, it can take up to fifteen minutes for the metric to appear in calls to ListMetrics.

You can publish either individual data points in the Value field, or arrays of values and the number of times each value occurred during the period by using the Values and Counts fields in the MetricDatum structure. Using the Values and Counts method enables you to publish up to 150 values per metric with one PutMetricData request, and supports retrieving percentile statistics on this data.
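A sketch of the Values/Counts form; the namespace and dimension are placeholders, and the datum says the value 120 occurred three times and 250 once:

```go
package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/cloudwatch"
)

func main() {
	svc := cloudwatch.New(session.Must(session.NewSession()))

	_, err := svc.PutMetricData(&cloudwatch.PutMetricDataInput{
		Namespace: aws.String("MyApp/Frontend"), // must not begin with "AWS/"
		MetricData: []*cloudwatch.MetricDatum{{
			MetricName: aws.String("RequestLatency"),
			Dimensions: []*cloudwatch.Dimension{{
				Name:  aws.String("Endpoint"), // ASCII, no leading colon
				Value: aws.String("/login"),
			}},
			// 120 ms occurred 3 times, 250 ms once.
			Values: []*float64{aws.Float64(120), aws.Float64(250)},
			Counts: []*float64{aws.Float64(3), aws.Float64(1)},
			Unit:   aws.String(cloudwatch.StandardUnitMilliseconds),
		}},
	})
	fmt.Println(err)
}
```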

Each PutMetricData request is limited to 1 MB in size for HTTP POST requests. You can send a payload compressed by gzip. Each request is also limited to no more than 1000 different metrics.

Although the Value parameter accepts numbers of type Double, CloudWatch rejects values that are either too small or too large. Values must be in the range of -2^360 to 2^360. In addition, special values (for example, NaN, +Infinity, -Infinity) are not supported.

You can use up to 30 dimensions per metric to further clarify what data the metric collects. Each dimension consists of a Name and Value pair. For more information about specifying dimensions, see Publishing Metrics in the Amazon CloudWatch User Guide.

You specify the time stamp to be associated with each data point. You can specify time stamps that are as much as two weeks before the current date, and as much as 2 hours after the current day and time.

Data points with time stamps from 24 hours ago or longer can take at least 48 hours to become available for GetMetricData or GetMetricStatistics from the time they are submitted. Data points with time stamps between 3 and 24 hours ago can take as much as 2 hours to become available for GetMetricData or GetMetricStatistics.

CloudWatch needs raw data points to calculate percentile statistics. If you publish data using a statistic set instead, you can only retrieve percentile statistics for this data if one of the following conditions is true:

", "PutMetricStream": "

Creates or updates a metric stream. Metric streams can automatically stream CloudWatch metrics to Amazon Web Services destinations, including Amazon S3, and to many third-party solutions.

For more information, see Using Metric Streams.

To create a metric stream, you must be signed in to an account that has the iam:PassRole permission and either the CloudWatchFullAccess policy or the cloudwatch:PutMetricStream permission.

When you create or update a metric stream, you choose one of the following:

By default, a metric stream always sends the MAX, MIN, SUM, and SAMPLECOUNT statistics for each metric that is streamed. You can use the StatisticsConfigurations parameter to have the metric stream send additional statistics in the stream. Streaming additional statistics incurs additional costs. For more information, see Amazon CloudWatch Pricing.

When you use PutMetricStream to create a new metric stream, the stream is created in the running state. If you use it to update an existing stream, the state of the stream is not changed.

If you are using CloudWatch cross-account observability and you create a metric stream in a monitoring account, you can choose whether to include metrics from source accounts in the stream. For more information, see CloudWatch cross-account observability.
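A sketch of creating a stream that is limited to one namespace, assuming a pre-existing Kinesis Data Firehose delivery stream and IAM role (both ARNs are placeholders):

```go
package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/cloudwatch"
)

func main() {
	svc := cloudwatch.New(session.Must(session.NewSession()))

	out, err := svc.PutMetricStream(&cloudwatch.PutMetricStreamInput{
		Name:         aws.String("my-metric-stream"),                                                // placeholder
		FirehoseArn:  aws.String("arn:aws:firehose:us-east-1:111122223333:deliverystream/my-stream"), // placeholder
		RoleArn:      aws.String("arn:aws:iam::111122223333:role/MetricStreamRole"),                  // placeholder
		OutputFormat: aws.String(cloudwatch.MetricStreamOutputFormatJson),
		// Stream only the AWS/EC2 namespace; omit filters to stream everything.
		IncludeFilters: []*cloudwatch.MetricStreamFilter{{
			Namespace: aws.String("AWS/EC2"),
		}},
	})
	fmt.Println(out, err)
}
```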

", "SetAlarmState": "

Temporarily sets the state of an alarm for testing purposes. When the updated state differs from the previous value, the action configured for the appropriate state is invoked. For example, if your alarm is configured to send an Amazon SNS message when an alarm is triggered, temporarily changing the alarm state to ALARM sends an SNS message.

Metric alarms return to their actual state quickly, often within seconds. Because the metric alarm state change happens quickly, it is typically only visible in the alarm's History tab in the Amazon CloudWatch console or through DescribeAlarmHistory.

If you use SetAlarmState on a composite alarm, the composite alarm is not guaranteed to return to its actual state. It returns to its actual state only once any of its child alarms change state. It is also reevaluated if you update its configuration.

If an alarm triggers EC2 Auto Scaling policies or application Auto Scaling policies, you must include information in the StateReasonData parameter to enable the policy to take the correct action.
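A sketch of temporarily forcing an alarm into ALARM to test its actions; the alarm name is a placeholder:

```go
package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/cloudwatch"
)

func main() {
	svc := cloudwatch.New(session.Must(session.NewSession()))

	// The alarm re-evaluates and returns to its real state shortly afterward.
	_, err := svc.SetAlarmState(&cloudwatch.SetAlarmStateInput{
		AlarmName:   aws.String("cpu-high"), // placeholder
		StateValue:  aws.String(cloudwatch.StateValueAlarm),
		StateReason: aws.String("testing alarm actions"),
	})
	fmt.Println(err)
}
```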

", @@ -118,7 +118,7 @@ "DescribeAlarmsInput$ParentsOfAlarmName": "

If you use this parameter and specify the name of a metric or composite alarm, the operation returns information about the \"parent\" alarms of the alarm you specify. These are the composite alarms that have AlarmRule parameters that reference the alarm named in ParentsOfAlarmName. Information about the alarm that you specify in ParentsOfAlarmName is not returned.

If you specify ParentsOfAlarmName, you cannot specify any other parameters in the request except for MaxRecords and NextToken. If you do so, you receive a validation error.

Only the Alarm Name and ARN are returned by this operation when you use this parameter. To get complete information about these alarms, perform another DescribeAlarms operation and specify the parent alarm names in the AlarmNames parameter.

", "MetricAlarm$AlarmName": "

The name of the alarm.

", "PutCompositeAlarmInput$AlarmName": "

The name for the composite alarm. This name must be unique within the Region.

", - "PutMetricAlarmInput$AlarmName": "

The name for the alarm. This name must be unique within the Region.

", + "PutMetricAlarmInput$AlarmName": "

The name for the alarm. This name must be unique within the Region.

The name must contain only UTF-8 characters, and can't contain ASCII control characters.

", "SetAlarmStateInput$AlarmName": "

The name of the alarm.

" } }, @@ -131,7 +131,7 @@ "AlarmNames": { "base": null, "refs": { - "DeleteAlarmsInput$AlarmNames": "

The alarms to be deleted.

", + "DeleteAlarmsInput$AlarmNames": "

The alarms to be deleted. Do not enclose the alarm names in quote marks.

", "DescribeAlarmsInput$AlarmNames": "

The names of the alarms to retrieve information about.

", "DisableAlarmActionsInput$AlarmNames": "

The names of the alarms.

", "EnableAlarmActionsInput$AlarmNames": "

The names of the alarms.

" @@ -532,14 +532,14 @@ "DimensionName": { "base": null, "refs": { - "Dimension$Name": "

The name of the dimension. Dimension names must contain only ASCII characters, must include at least one non-whitespace character, and cannot start with a colon (:).

", + "Dimension$Name": "

The name of the dimension. Dimension names must contain only ASCII characters, must include at least one non-whitespace character, and cannot start with a colon (:). ASCII control characters are not supported as part of dimension names.

", "DimensionFilter$Name": "

The dimension name to be matched.

" } }, "DimensionValue": { "base": null, "refs": { - "Dimension$Value": "

The value of the dimension. Dimension values must contain only ASCII characters and must include at least one non-whitespace character.

", + "Dimension$Value": "

The value of the dimension. Dimension values must contain only ASCII characters and must include at least one non-whitespace character. ASCII control characters are not supported as part of dimension values.

", "DimensionFilter$Value": "

The value of the dimension to be matched.

" } }, @@ -553,7 +553,7 @@ "GetMetricStatisticsInput$Dimensions": "

The dimensions. If the metric contains multiple dimensions, you must include a value for each dimension. CloudWatch treats each unique combination of dimensions as a separate metric. If a specific combination of dimensions was not published, you can't retrieve statistics for it. You must specify the same dimensions that were used when the metrics were created. For an example, see Dimension Combinations in the Amazon CloudWatch User Guide. For more information about specifying dimensions, see Publishing Metrics in the Amazon CloudWatch User Guide.

", "Metric$Dimensions": "

The dimensions for the metric.

", "MetricAlarm$Dimensions": "

The dimensions for the metric associated with the alarm.

", - "MetricDatum$Dimensions": "

The dimensions associated with the metric.

", + "MetricDatum$Dimensions": "

The dimensions associated with the metric.

", "PutAnomalyDetectorInput$Dimensions": "

The metric dimensions to create the anomaly detection model for.

", "PutMetricAlarmInput$Dimensions": "

The dimensions for the metric specified in MetricName.

", "SingleMetricAnomalyDetector$Dimensions": "

The metric dimensions to create the anomaly detection model for.

" @@ -1247,7 +1247,7 @@ } }, "MetricStreamFilter": { - "base": "

This structure contains the name of one of the metric namespaces that is listed in a filter of a metric stream.

", + "base": "

This structure contains the name of one of the metric namespaces that is listed in a filter of a metric stream.

The namespace can contain only ASCII printable characters (ASCII range 32 through 126). It must contain at least one non-whitespace character.

", "refs": { "MetricStreamFilters$member": null } @@ -1369,7 +1369,7 @@ "MetricStreamStatisticsMetric$Namespace": "

The namespace of the metric.

", "PutAnomalyDetectorInput$Namespace": "

The namespace of the metric to create the anomaly detection model for.

", "PutMetricAlarmInput$Namespace": "

The namespace for the metric specified in MetricName.

", - "PutMetricDataInput$Namespace": "

The namespace for the metric data.

To avoid conflicts with Amazon Web Services service namespaces, you should not specify a namespace that begins with AWS/

", + "PutMetricDataInput$Namespace": "

The namespace for the metric data. You can use ASCII characters for the namespace, except for control characters, which are not supported.

To avoid conflicts with Amazon Web Services service namespaces, you should not specify a namespace that begins with AWS/

", "SingleMetricAnomalyDetector$Namespace": "

The namespace of the metric to create the anomaly detection model for.

" } }, @@ -1521,9 +1521,9 @@ "PutCompositeAlarmInput$AlarmActions": "

The actions to execute when this alarm transitions to the ALARM state from any other state. Each action is specified as an Amazon Resource Name (ARN).

Valid Values: arn:aws:sns:region:account-id:sns-topic-name | arn:aws:ssm:region:account-id:opsitem:severity

", "PutCompositeAlarmInput$InsufficientDataActions": "

The actions to execute when this alarm transitions to the INSUFFICIENT_DATA state from any other state. Each action is specified as an Amazon Resource Name (ARN).

Valid Values: arn:aws:sns:region:account-id:sns-topic-name

", "PutCompositeAlarmInput$OKActions": "

The actions to execute when this alarm transitions to an OK state from any other state. Each action is specified as an Amazon Resource Name (ARN).

Valid Values: arn:aws:sns:region:account-id:sns-topic-name

", - "PutMetricAlarmInput$OKActions": "

The actions to execute when this alarm transitions to an OK state from any other state. Each action is specified as an Amazon Resource Name (ARN).

Valid Values: arn:aws:automate:region:ec2:stop | arn:aws:automate:region:ec2:terminate | arn:aws:automate:region:ec2:recover | arn:aws:automate:region:ec2:reboot | arn:aws:sns:region:account-id:sns-topic-name | arn:aws:autoscaling:region:account-id:scalingPolicy:policy-id:autoScalingGroupName/group-friendly-name:policyName/policy-friendly-name

Valid Values (for use with IAM roles): arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Stop/1.0 | arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Terminate/1.0 | arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Reboot/1.0 | arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Recover/1.0

", - "PutMetricAlarmInput$AlarmActions": "

The actions to execute when this alarm transitions to the ALARM state from any other state. Each action is specified as an Amazon Resource Name (ARN).

Valid Values: arn:aws:automate:region:ec2:stop | arn:aws:automate:region:ec2:terminate | arn:aws:automate:region:ec2:recover | arn:aws:automate:region:ec2:reboot | arn:aws:sns:region:account-id:sns-topic-name | arn:aws:autoscaling:region:account-id:scalingPolicy:policy-id:autoScalingGroupName/group-friendly-name:policyName/policy-friendly-name | arn:aws:ssm:region:account-id:opsitem:severity | arn:aws:ssm-incidents::account-id:response-plan:response-plan-name

Valid Values (for use with IAM roles): arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Stop/1.0 | arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Terminate/1.0 | arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Reboot/1.0 | arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Recover/1.0

", - "PutMetricAlarmInput$InsufficientDataActions": "

The actions to execute when this alarm transitions to the INSUFFICIENT_DATA state from any other state. Each action is specified as an Amazon Resource Name (ARN).

Valid Values: arn:aws:automate:region:ec2:stop | arn:aws:automate:region:ec2:terminate | arn:aws:automate:region:ec2:recover | arn:aws:automate:region:ec2:reboot | arn:aws:sns:region:account-id:sns-topic-name | arn:aws:autoscaling:region:account-id:scalingPolicy:policy-id:autoScalingGroupName/group-friendly-name:policyName/policy-friendly-name

Valid Values (for use with IAM roles): arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Stop/1.0 | arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Terminate/1.0 | arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Reboot/1.0

" + "PutMetricAlarmInput$OKActions": "

The actions to execute when this alarm transitions to an OK state from any other state. Each action is specified as an Amazon Resource Name (ARN). Valid values:

EC2 actions:

Autoscaling action:

SNS notification action:

SSM integration actions:

", + "PutMetricAlarmInput$AlarmActions": "

The actions to execute when this alarm transitions to the ALARM state from any other state. Each action is specified as an Amazon Resource Name (ARN). Valid values:

EC2 actions:

Autoscaling action:

SNS notification action:

SSM integration actions:

", + "PutMetricAlarmInput$InsufficientDataActions": "

The actions to execute when this alarm transitions to the INSUFFICIENT_DATA state from any other state. Each action is specified as an Amazon Resource Name (ARN). Valid values:

EC2 actions:

Autoscaling action:

SNS notification action:

SSM integration actions:

" } }, "ResourceName": { diff --git a/models/apis/monitoring/2010-08-01/endpoint-rule-set-1.json b/models/apis/monitoring/2010-08-01/endpoint-rule-set-1.json index b155f7ae141..94f8f398869 100644 --- a/models/apis/monitoring/2010-08-01/endpoint-rule-set-1.json +++ b/models/apis/monitoring/2010-08-01/endpoint-rule-set-1.json @@ -3,7 +3,7 @@ "parameters": { "Region": { "builtIn": "AWS::Region", - "required": true, + "required": false, "documentation": "The AWS region used to dispatch the request.", "type": "String" }, @@ -32,13 +32,12 @@ { "conditions": [ { - "fn": "aws.partition", + "fn": "isSet", "argv": [ { - "ref": "Region" + "ref": "Endpoint" } - ], - "assign": "PartitionResult" + ] } ], "type": "tree", @@ -46,14 +45,20 @@ { "conditions": [ { - "fn": "isSet", + "fn": "booleanEquals", "argv": [ { - "ref": "Endpoint" - } + "ref": "UseFIPS" + }, + true ] } ], + "error": "Invalid Configuration: FIPS and custom endpoint are not supported", + "type": "error" + }, + { + "conditions": [], "type": "tree", "rules": [ { @@ -62,67 +67,42 @@ "fn": "booleanEquals", "argv": [ { - "ref": "UseFIPS" + "ref": "UseDualStack" }, true ] } ], - "error": "Invalid Configuration: FIPS and custom endpoint are not supported", + "error": "Invalid Configuration: Dualstack and custom endpoint are not supported", "type": "error" }, { "conditions": [], - "type": "tree", - "rules": [ - { - "conditions": [ - { - "fn": "booleanEquals", - "argv": [ - { - "ref": "UseDualStack" - }, - true - ] - } - ], - "error": "Invalid Configuration: Dualstack and custom endpoint are not supported", - "type": "error" + "endpoint": { + "url": { + "ref": "Endpoint" }, - { - "conditions": [], - "endpoint": { - "url": { - "ref": "Endpoint" - }, - "properties": {}, - "headers": {} - }, - "type": "endpoint" - } - ] + "properties": {}, + "headers": {} + }, + "type": "endpoint" } ] - }, + } + ] + }, + { + "conditions": [], + "type": "tree", + "rules": [ { "conditions": [ { - "fn": "booleanEquals", - "argv": [ - { - "ref": "UseFIPS" - }, - true - ] - }, - { - "fn": "booleanEquals", + "fn": "isSet", "argv": [ { - "ref": "UseDualStack" - }, - true + "ref": "Region" + } ] } ], @@ -131,199 +111,263 @@ { "conditions": [ { - "fn": "booleanEquals", + "fn": "aws.partition", "argv": [ - true, { - "fn": "getAttr", + "ref": "Region" + } + ], + "assign": "PartitionResult" + } + ], + "type": "tree", + "rules": [ + { + "conditions": [ + { + "fn": "booleanEquals", "argv": [ { - "ref": "PartitionResult" + "ref": "UseFIPS" }, - "supportsFIPS" + true ] - } - ] - }, - { - "fn": "booleanEquals", - "argv": [ - true, + }, { - "fn": "getAttr", + "fn": "booleanEquals", "argv": [ { - "ref": "PartitionResult" + "ref": "UseDualStack" + }, + true + ] + } + ], + "type": "tree", + "rules": [ + { + "conditions": [ + { + "fn": "booleanEquals", + "argv": [ + true, + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "supportsFIPS" + ] + } + ] }, - "supportsDualStack" + { + "fn": "booleanEquals", + "argv": [ + true, + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "supportsDualStack" + ] + } + ] + } + ], + "type": "tree", + "rules": [ + { + "conditions": [], + "type": "tree", + "rules": [ + { + "conditions": [], + "endpoint": { + "url": "https://monitoring-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + } + ] + } ] + }, + { + "conditions": [], + "error": "FIPS and DualStack are enabled, but this partition does not support one or both", + "type": "error" } ] - } - ], - 
"type": "tree", - "rules": [ - { - "conditions": [], - "endpoint": { - "url": "https://monitoring-fips.{Region}.{PartitionResult#dualStackDnsSuffix}", - "properties": {}, - "headers": {} - }, - "type": "endpoint" - } - ] - }, - { - "conditions": [], - "error": "FIPS and DualStack are enabled, but this partition does not support one or both", - "type": "error" - } - ] - }, - { - "conditions": [ - { - "fn": "booleanEquals", - "argv": [ - { - "ref": "UseFIPS" }, - true - ] - } - ], - "type": "tree", - "rules": [ - { - "conditions": [ { - "fn": "booleanEquals", - "argv": [ - true, + "conditions": [ { - "fn": "getAttr", + "fn": "booleanEquals", "argv": [ { - "ref": "PartitionResult" + "ref": "UseFIPS" }, - "supportsFIPS" + true ] } - ] - } - ], - "type": "tree", - "rules": [ - { - "conditions": [], + ], "type": "tree", "rules": [ { "conditions": [ { - "fn": "stringEquals", + "fn": "booleanEquals", "argv": [ - "aws-us-gov", + true, { "fn": "getAttr", "argv": [ { "ref": "PartitionResult" }, - "name" + "supportsFIPS" ] } ] } ], - "endpoint": { - "url": "https://monitoring.{Region}.{PartitionResult#dnsSuffix}", - "properties": {}, - "headers": {} - }, - "type": "endpoint" + "type": "tree", + "rules": [ + { + "conditions": [], + "type": "tree", + "rules": [ + { + "conditions": [ + { + "fn": "stringEquals", + "argv": [ + "aws-us-gov", + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "name" + ] + } + ] + } + ], + "endpoint": { + "url": "https://monitoring.{Region}.amazonaws.com", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + }, + { + "conditions": [], + "endpoint": { + "url": "https://monitoring-fips.{Region}.{PartitionResult#dnsSuffix}", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + } + ] + } + ] }, { "conditions": [], - "endpoint": { - "url": "https://monitoring-fips.{Region}.{PartitionResult#dnsSuffix}", - "properties": {}, - "headers": {} - }, - "type": "endpoint" + "error": "FIPS is enabled but this partition does not support FIPS", + "type": "error" } ] - } - ] - }, - { - "conditions": [], - "error": "FIPS is enabled but this partition does not support FIPS", - "type": "error" - } - ] - }, - { - "conditions": [ - { - "fn": "booleanEquals", - "argv": [ - { - "ref": "UseDualStack" }, - true - ] - } - ], - "type": "tree", - "rules": [ - { - "conditions": [ { - "fn": "booleanEquals", - "argv": [ - true, + "conditions": [ { - "fn": "getAttr", + "fn": "booleanEquals", "argv": [ { - "ref": "PartitionResult" + "ref": "UseDualStack" }, - "supportsDualStack" + true + ] + } + ], + "type": "tree", + "rules": [ + { + "conditions": [ + { + "fn": "booleanEquals", + "argv": [ + true, + { + "fn": "getAttr", + "argv": [ + { + "ref": "PartitionResult" + }, + "supportsDualStack" + ] + } + ] + } + ], + "type": "tree", + "rules": [ + { + "conditions": [], + "type": "tree", + "rules": [ + { + "conditions": [], + "endpoint": { + "url": "https://monitoring.{Region}.{PartitionResult#dualStackDnsSuffix}", + "properties": {}, + "headers": {} + }, + "type": "endpoint" + } + ] + } ] + }, + { + "conditions": [], + "error": "DualStack is enabled but this partition does not support DualStack", + "type": "error" } ] - } - ], - "type": "tree", - "rules": [ + }, { "conditions": [], - "endpoint": { - "url": "https://monitoring.{Region}.{PartitionResult#dualStackDnsSuffix}", - "properties": {}, - "headers": {} - }, - "type": "endpoint" + "type": "tree", + "rules": [ + { + "conditions": [], + "endpoint": { + "url": "https://monitoring.{Region}.{PartitionResult#dnsSuffix}", + 
"properties": {}, + "headers": {} + }, + "type": "endpoint" + } + ] } ] - }, - { - "conditions": [], - "error": "DualStack is enabled but this partition does not support DualStack", - "type": "error" } ] }, { "conditions": [], - "endpoint": { - "url": "https://monitoring.{Region}.{PartitionResult#dnsSuffix}", - "properties": {}, - "headers": {} - }, - "type": "endpoint" + "error": "Invalid Configuration: Missing Region", + "type": "error" } ] } diff --git a/models/apis/monitoring/2010-08-01/endpoint-tests-1.json b/models/apis/monitoring/2010-08-01/endpoint-tests-1.json index b32656fbc08..1a60d1c5f38 100644 --- a/models/apis/monitoring/2010-08-01/endpoint-tests-1.json +++ b/models/apis/monitoring/2010-08-01/endpoint-tests-1.json @@ -1,1026 +1,29 @@ { "testCases": [ { - "documentation": "For region ap-south-2 with FIPS enabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.ap-south-2.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "ap-south-2" - } - }, - { - "documentation": "For region ap-south-2 with FIPS enabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.ap-south-2.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": true, - "Region": "ap-south-2" - } - }, - { - "documentation": "For region ap-south-2 with FIPS disabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring.ap-south-2.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": false, - "Region": "ap-south-2" - } - }, - { - "documentation": "For region ap-south-2 with FIPS disabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring.ap-south-2.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": false, - "Region": "ap-south-2" - } - }, - { - "documentation": "For region ap-south-1 with FIPS enabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.ap-south-1.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "ap-south-1" - } - }, - { - "documentation": "For region ap-south-1 with FIPS enabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.ap-south-1.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": true, - "Region": "ap-south-1" - } - }, - { - "documentation": "For region ap-south-1 with FIPS disabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring.ap-south-1.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": false, - "Region": "ap-south-1" - } - }, - { - "documentation": "For region ap-south-1 with FIPS disabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring.ap-south-1.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": false, - "Region": "ap-south-1" - } - }, - { - "documentation": "For region eu-south-1 with FIPS enabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.eu-south-1.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "eu-south-1" - } - }, - { - "documentation": "For region eu-south-1 with FIPS enabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.eu-south-1.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": true, - "Region": "eu-south-1" - } - }, - { - "documentation": "For 
region eu-south-1 with FIPS disabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring.eu-south-1.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": false, - "Region": "eu-south-1" - } - }, - { - "documentation": "For region eu-south-1 with FIPS disabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring.eu-south-1.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": false, - "Region": "eu-south-1" - } - }, - { - "documentation": "For region eu-south-2 with FIPS enabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.eu-south-2.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "eu-south-2" - } - }, - { - "documentation": "For region eu-south-2 with FIPS enabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.eu-south-2.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": true, - "Region": "eu-south-2" - } - }, - { - "documentation": "For region eu-south-2 with FIPS disabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring.eu-south-2.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": false, - "Region": "eu-south-2" - } - }, - { - "documentation": "For region eu-south-2 with FIPS disabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring.eu-south-2.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": false, - "Region": "eu-south-2" - } - }, - { - "documentation": "For region us-gov-east-1 with FIPS enabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.us-gov-east-1.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "us-gov-east-1" - } - }, - { - "documentation": "For region us-gov-east-1 with FIPS enabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring.us-gov-east-1.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": true, - "Region": "us-gov-east-1" - } - }, - { - "documentation": "For region us-gov-east-1 with FIPS disabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring.us-gov-east-1.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": false, - "Region": "us-gov-east-1" - } - }, - { - "documentation": "For region us-gov-east-1 with FIPS disabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring.us-gov-east-1.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": false, - "Region": "us-gov-east-1" - } - }, - { - "documentation": "For region me-central-1 with FIPS enabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.me-central-1.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "me-central-1" - } - }, - { - "documentation": "For region me-central-1 with FIPS enabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.me-central-1.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": true, - "Region": "me-central-1" - } - }, - { - "documentation": "For region me-central-1 with FIPS disabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring.me-central-1.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": false, - 
"Region": "me-central-1" - } - }, - { - "documentation": "For region me-central-1 with FIPS disabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring.me-central-1.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": false, - "Region": "me-central-1" - } - }, - { - "documentation": "For region ca-central-1 with FIPS enabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.ca-central-1.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "ca-central-1" - } - }, - { - "documentation": "For region ca-central-1 with FIPS enabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.ca-central-1.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": true, - "Region": "ca-central-1" - } - }, - { - "documentation": "For region ca-central-1 with FIPS disabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring.ca-central-1.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": false, - "Region": "ca-central-1" - } - }, - { - "documentation": "For region ca-central-1 with FIPS disabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring.ca-central-1.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": false, - "Region": "ca-central-1" - } - }, - { - "documentation": "For region eu-central-1 with FIPS enabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.eu-central-1.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "eu-central-1" - } - }, - { - "documentation": "For region eu-central-1 with FIPS enabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.eu-central-1.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": true, - "Region": "eu-central-1" - } - }, - { - "documentation": "For region eu-central-1 with FIPS disabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring.eu-central-1.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": false, - "Region": "eu-central-1" - } - }, - { - "documentation": "For region eu-central-1 with FIPS disabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring.eu-central-1.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": false, - "Region": "eu-central-1" - } - }, - { - "documentation": "For region us-iso-west-1 with FIPS enabled and DualStack enabled", - "expect": { - "error": "FIPS and DualStack are enabled, but this partition does not support one or both" - }, - "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "us-iso-west-1" - } - }, - { - "documentation": "For region us-iso-west-1 with FIPS enabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.us-iso-west-1.c2s.ic.gov" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": true, - "Region": "us-iso-west-1" - } - }, - { - "documentation": "For region us-iso-west-1 with FIPS disabled and DualStack enabled", - "expect": { - "error": "DualStack is enabled but this partition does not support DualStack" - }, - "params": { - "UseDualStack": true, - "UseFIPS": false, - "Region": "us-iso-west-1" - } - }, - { - "documentation": "For region us-iso-west-1 with FIPS disabled and DualStack disabled", - "expect": { - "endpoint": { - 
"url": "https://monitoring.us-iso-west-1.c2s.ic.gov" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": false, - "Region": "us-iso-west-1" - } - }, - { - "documentation": "For region eu-central-2 with FIPS enabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.eu-central-2.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "eu-central-2" - } - }, - { - "documentation": "For region eu-central-2 with FIPS enabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.eu-central-2.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": true, - "Region": "eu-central-2" - } - }, - { - "documentation": "For region eu-central-2 with FIPS disabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring.eu-central-2.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": false, - "Region": "eu-central-2" - } - }, - { - "documentation": "For region eu-central-2 with FIPS disabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring.eu-central-2.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": false, - "Region": "eu-central-2" - } - }, - { - "documentation": "For region us-west-1 with FIPS enabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.us-west-1.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "us-west-1" - } - }, - { - "documentation": "For region us-west-1 with FIPS enabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.us-west-1.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": true, - "Region": "us-west-1" - } - }, - { - "documentation": "For region us-west-1 with FIPS disabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring.us-west-1.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": false, - "Region": "us-west-1" - } - }, - { - "documentation": "For region us-west-1 with FIPS disabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring.us-west-1.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": false, - "Region": "us-west-1" - } - }, - { - "documentation": "For region us-west-2 with FIPS enabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.us-west-2.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "us-west-2" - } - }, - { - "documentation": "For region us-west-2 with FIPS enabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.us-west-2.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": true, - "Region": "us-west-2" - } - }, - { - "documentation": "For region us-west-2 with FIPS disabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring.us-west-2.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": false, - "Region": "us-west-2" - } - }, - { - "documentation": "For region us-west-2 with FIPS disabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring.us-west-2.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": false, - "Region": "us-west-2" - } - }, - { - "documentation": "For region af-south-1 with FIPS enabled and DualStack enabled", - "expect": { 
- "endpoint": { - "url": "https://monitoring-fips.af-south-1.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "af-south-1" - } - }, - { - "documentation": "For region af-south-1 with FIPS enabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.af-south-1.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": true, - "Region": "af-south-1" - } - }, - { - "documentation": "For region af-south-1 with FIPS disabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring.af-south-1.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": false, - "Region": "af-south-1" - } - }, - { - "documentation": "For region af-south-1 with FIPS disabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring.af-south-1.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": false, - "Region": "af-south-1" - } - }, - { - "documentation": "For region eu-north-1 with FIPS enabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.eu-north-1.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "eu-north-1" - } - }, - { - "documentation": "For region eu-north-1 with FIPS enabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.eu-north-1.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": true, - "Region": "eu-north-1" - } - }, - { - "documentation": "For region eu-north-1 with FIPS disabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring.eu-north-1.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": false, - "Region": "eu-north-1" - } - }, - { - "documentation": "For region eu-north-1 with FIPS disabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring.eu-north-1.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": false, - "Region": "eu-north-1" - } - }, - { - "documentation": "For region eu-west-3 with FIPS enabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.eu-west-3.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "eu-west-3" - } - }, - { - "documentation": "For region eu-west-3 with FIPS enabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.eu-west-3.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": true, - "Region": "eu-west-3" - } - }, - { - "documentation": "For region eu-west-3 with FIPS disabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring.eu-west-3.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": false, - "Region": "eu-west-3" - } - }, - { - "documentation": "For region eu-west-3 with FIPS disabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring.eu-west-3.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": false, - "Region": "eu-west-3" - } - }, - { - "documentation": "For region eu-west-2 with FIPS enabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.eu-west-2.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "eu-west-2" - } - }, - { - "documentation": "For region eu-west-2 with FIPS enabled and DualStack disabled", - "expect": { - 
"endpoint": { - "url": "https://monitoring-fips.eu-west-2.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": true, - "Region": "eu-west-2" - } - }, - { - "documentation": "For region eu-west-2 with FIPS disabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring.eu-west-2.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": false, - "Region": "eu-west-2" - } - }, - { - "documentation": "For region eu-west-2 with FIPS disabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring.eu-west-2.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": false, - "Region": "eu-west-2" - } - }, - { - "documentation": "For region eu-west-1 with FIPS enabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.eu-west-1.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "eu-west-1" - } - }, - { - "documentation": "For region eu-west-1 with FIPS enabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.eu-west-1.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": true, - "Region": "eu-west-1" - } - }, - { - "documentation": "For region eu-west-1 with FIPS disabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring.eu-west-1.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": false, - "Region": "eu-west-1" - } - }, - { - "documentation": "For region eu-west-1 with FIPS disabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring.eu-west-1.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": false, - "Region": "eu-west-1" - } - }, - { - "documentation": "For region ap-northeast-3 with FIPS enabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.ap-northeast-3.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "ap-northeast-3" - } - }, - { - "documentation": "For region ap-northeast-3 with FIPS enabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.ap-northeast-3.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": true, - "Region": "ap-northeast-3" - } - }, - { - "documentation": "For region ap-northeast-3 with FIPS disabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring.ap-northeast-3.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": false, - "Region": "ap-northeast-3" - } - }, - { - "documentation": "For region ap-northeast-3 with FIPS disabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring.ap-northeast-3.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": false, - "Region": "ap-northeast-3" - } - }, - { - "documentation": "For region ap-northeast-2 with FIPS enabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.ap-northeast-2.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "ap-northeast-2" - } - }, - { - "documentation": "For region ap-northeast-2 with FIPS enabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.ap-northeast-2.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": true, - "Region": "ap-northeast-2" - } - }, - { - "documentation": "For 
region ap-northeast-2 with FIPS disabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring.ap-northeast-2.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": false, - "Region": "ap-northeast-2" - } - }, - { - "documentation": "For region ap-northeast-2 with FIPS disabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring.ap-northeast-2.amazonaws.com" - } - }, - "params": { - "UseDualStack": false, - "UseFIPS": false, - "Region": "ap-northeast-2" - } - }, - { - "documentation": "For region ap-northeast-1 with FIPS enabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.ap-northeast-1.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "ap-northeast-1" - } - }, - { - "documentation": "For region ap-northeast-1 with FIPS enabled and DualStack disabled", + "documentation": "For region af-south-1 with FIPS disabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring-fips.ap-northeast-1.amazonaws.com" + "url": "https://monitoring.af-south-1.amazonaws.com" } }, "params": { + "UseFIPS": false, "UseDualStack": false, - "UseFIPS": true, - "Region": "ap-northeast-1" + "Region": "af-south-1" } }, { - "documentation": "For region ap-northeast-1 with FIPS disabled and DualStack enabled", + "documentation": "For region ap-east-1 with FIPS disabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring.ap-northeast-1.api.aws" + "url": "https://monitoring.ap-east-1.amazonaws.com" } }, "params": { - "UseDualStack": true, "UseFIPS": false, - "Region": "ap-northeast-1" + "UseDualStack": false, + "Region": "ap-east-1" } }, { @@ -1031,733 +34,551 @@ } }, "params": { - "UseDualStack": false, "UseFIPS": false, - "Region": "ap-northeast-1" - } - }, - { - "documentation": "For region me-south-1 with FIPS enabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.me-south-1.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "me-south-1" - } - }, - { - "documentation": "For region me-south-1 with FIPS enabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.me-south-1.amazonaws.com" - } - }, - "params": { "UseDualStack": false, - "UseFIPS": true, - "Region": "me-south-1" + "Region": "ap-northeast-1" } }, { - "documentation": "For region me-south-1 with FIPS disabled and DualStack enabled", + "documentation": "For region ap-northeast-2 with FIPS disabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring.me-south-1.api.aws" + "url": "https://monitoring.ap-northeast-2.amazonaws.com" } }, "params": { - "UseDualStack": true, "UseFIPS": false, - "Region": "me-south-1" - } - }, - { - "documentation": "For region me-south-1 with FIPS disabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring.me-south-1.amazonaws.com" - } - }, - "params": { "UseDualStack": false, - "UseFIPS": false, - "Region": "me-south-1" - } - }, - { - "documentation": "For region sa-east-1 with FIPS enabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.sa-east-1.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "sa-east-1" + "Region": "ap-northeast-2" } }, { - "documentation": "For region sa-east-1 with FIPS enabled and DualStack disabled", + "documentation": "For region ap-northeast-3 with FIPS 
disabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring-fips.sa-east-1.amazonaws.com" + "url": "https://monitoring.ap-northeast-3.amazonaws.com" } }, "params": { + "UseFIPS": false, "UseDualStack": false, - "UseFIPS": true, - "Region": "sa-east-1" + "Region": "ap-northeast-3" } }, { - "documentation": "For region sa-east-1 with FIPS disabled and DualStack enabled", + "documentation": "For region ap-south-1 with FIPS disabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring.sa-east-1.api.aws" + "url": "https://monitoring.ap-south-1.amazonaws.com" } }, "params": { - "UseDualStack": true, "UseFIPS": false, - "Region": "sa-east-1" - } - }, - { - "documentation": "For region sa-east-1 with FIPS disabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring.sa-east-1.amazonaws.com" - } - }, - "params": { "UseDualStack": false, - "UseFIPS": false, - "Region": "sa-east-1" - } - }, - { - "documentation": "For region ap-east-1 with FIPS enabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.ap-east-1.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "ap-east-1" + "Region": "ap-south-1" } }, { - "documentation": "For region ap-east-1 with FIPS enabled and DualStack disabled", + "documentation": "For region ap-southeast-1 with FIPS disabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring-fips.ap-east-1.amazonaws.com" + "url": "https://monitoring.ap-southeast-1.amazonaws.com" } }, "params": { + "UseFIPS": false, "UseDualStack": false, - "UseFIPS": true, - "Region": "ap-east-1" + "Region": "ap-southeast-1" } }, { - "documentation": "For region ap-east-1 with FIPS disabled and DualStack enabled", + "documentation": "For region ap-southeast-2 with FIPS disabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring.ap-east-1.api.aws" + "url": "https://monitoring.ap-southeast-2.amazonaws.com" } }, "params": { - "UseDualStack": true, "UseFIPS": false, - "Region": "ap-east-1" - } - }, - { - "documentation": "For region ap-east-1 with FIPS disabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring.ap-east-1.amazonaws.com" - } - }, - "params": { "UseDualStack": false, - "UseFIPS": false, - "Region": "ap-east-1" - } - }, - { - "documentation": "For region cn-north-1 with FIPS enabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.cn-north-1.api.amazonwebservices.com.cn" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "cn-north-1" + "Region": "ap-southeast-2" } }, { - "documentation": "For region cn-north-1 with FIPS enabled and DualStack disabled", + "documentation": "For region ap-southeast-3 with FIPS disabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring-fips.cn-north-1.amazonaws.com.cn" + "url": "https://monitoring.ap-southeast-3.amazonaws.com" } }, "params": { + "UseFIPS": false, "UseDualStack": false, - "UseFIPS": true, - "Region": "cn-north-1" + "Region": "ap-southeast-3" } }, { - "documentation": "For region cn-north-1 with FIPS disabled and DualStack enabled", + "documentation": "For region ca-central-1 with FIPS disabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring.cn-north-1.api.amazonwebservices.com.cn" + "url": "https://monitoring.ca-central-1.amazonaws.com" } }, "params": { - "UseDualStack": true, 
"UseFIPS": false, - "Region": "cn-north-1" - } - }, - { - "documentation": "For region cn-north-1 with FIPS disabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring.cn-north-1.amazonaws.com.cn" - } - }, - "params": { "UseDualStack": false, - "UseFIPS": false, - "Region": "cn-north-1" - } - }, - { - "documentation": "For region ca-west-1 with FIPS enabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.ca-west-1.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "ca-west-1" + "Region": "ca-central-1" } }, { - "documentation": "For region ca-west-1 with FIPS enabled and DualStack disabled", + "documentation": "For region eu-central-1 with FIPS disabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring-fips.ca-west-1.amazonaws.com" + "url": "https://monitoring.eu-central-1.amazonaws.com" } }, "params": { + "UseFIPS": false, "UseDualStack": false, - "UseFIPS": true, - "Region": "ca-west-1" + "Region": "eu-central-1" } }, { - "documentation": "For region ca-west-1 with FIPS disabled and DualStack enabled", + "documentation": "For region eu-north-1 with FIPS disabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring.ca-west-1.api.aws" + "url": "https://monitoring.eu-north-1.amazonaws.com" } }, "params": { - "UseDualStack": true, "UseFIPS": false, - "Region": "ca-west-1" - } - }, - { - "documentation": "For region ca-west-1 with FIPS disabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring.ca-west-1.amazonaws.com" - } - }, - "params": { "UseDualStack": false, - "UseFIPS": false, - "Region": "ca-west-1" - } - }, - { - "documentation": "For region us-gov-west-1 with FIPS enabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.us-gov-west-1.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "us-gov-west-1" + "Region": "eu-north-1" } }, { - "documentation": "For region us-gov-west-1 with FIPS enabled and DualStack disabled", + "documentation": "For region eu-south-1 with FIPS disabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring.us-gov-west-1.amazonaws.com" + "url": "https://monitoring.eu-south-1.amazonaws.com" } }, "params": { + "UseFIPS": false, "UseDualStack": false, - "UseFIPS": true, - "Region": "us-gov-west-1" + "Region": "eu-south-1" } }, { - "documentation": "For region us-gov-west-1 with FIPS disabled and DualStack enabled", + "documentation": "For region eu-west-1 with FIPS disabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring.us-gov-west-1.api.aws" + "url": "https://monitoring.eu-west-1.amazonaws.com" } }, "params": { - "UseDualStack": true, "UseFIPS": false, - "Region": "us-gov-west-1" - } - }, - { - "documentation": "For region us-gov-west-1 with FIPS disabled and DualStack disabled", - "expect": { - "endpoint": { - "url": "https://monitoring.us-gov-west-1.amazonaws.com" - } - }, - "params": { "UseDualStack": false, - "UseFIPS": false, - "Region": "us-gov-west-1" - } - }, - { - "documentation": "For region ap-southeast-1 with FIPS enabled and DualStack enabled", - "expect": { - "endpoint": { - "url": "https://monitoring-fips.ap-southeast-1.api.aws" - } - }, - "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "ap-southeast-1" + "Region": "eu-west-1" } }, { - "documentation": "For region ap-southeast-1 with FIPS enabled and 
DualStack disabled", + "documentation": "For region eu-west-2 with FIPS disabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring-fips.ap-southeast-1.amazonaws.com" + "url": "https://monitoring.eu-west-2.amazonaws.com" } }, "params": { + "UseFIPS": false, "UseDualStack": false, - "UseFIPS": true, - "Region": "ap-southeast-1" + "Region": "eu-west-2" } }, { - "documentation": "For region ap-southeast-1 with FIPS disabled and DualStack enabled", + "documentation": "For region eu-west-3 with FIPS disabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring.ap-southeast-1.api.aws" + "url": "https://monitoring.eu-west-3.amazonaws.com" } }, "params": { - "UseDualStack": true, "UseFIPS": false, - "Region": "ap-southeast-1" + "UseDualStack": false, + "Region": "eu-west-3" } }, { - "documentation": "For region ap-southeast-1 with FIPS disabled and DualStack disabled", + "documentation": "For region me-south-1 with FIPS disabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring.ap-southeast-1.amazonaws.com" + "url": "https://monitoring.me-south-1.amazonaws.com" } }, "params": { - "UseDualStack": false, "UseFIPS": false, - "Region": "ap-southeast-1" + "UseDualStack": false, + "Region": "me-south-1" } }, { - "documentation": "For region ap-southeast-2 with FIPS enabled and DualStack enabled", + "documentation": "For region sa-east-1 with FIPS disabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring-fips.ap-southeast-2.api.aws" + "url": "https://monitoring.sa-east-1.amazonaws.com" } }, "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "ap-southeast-2" + "UseFIPS": false, + "UseDualStack": false, + "Region": "sa-east-1" } }, { - "documentation": "For region ap-southeast-2 with FIPS enabled and DualStack disabled", + "documentation": "For region us-east-1 with FIPS disabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring-fips.ap-southeast-2.amazonaws.com" + "url": "https://monitoring.us-east-1.amazonaws.com" } }, "params": { + "UseFIPS": false, "UseDualStack": false, - "UseFIPS": true, - "Region": "ap-southeast-2" + "Region": "us-east-1" } }, { - "documentation": "For region ap-southeast-2 with FIPS disabled and DualStack enabled", + "documentation": "For region us-east-1 with FIPS enabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring.ap-southeast-2.api.aws" + "url": "https://monitoring-fips.us-east-1.amazonaws.com" } }, "params": { - "UseDualStack": true, - "UseFIPS": false, - "Region": "ap-southeast-2" + "UseFIPS": true, + "UseDualStack": false, + "Region": "us-east-1" } }, { - "documentation": "For region ap-southeast-2 with FIPS disabled and DualStack disabled", + "documentation": "For region us-east-2 with FIPS disabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring.ap-southeast-2.amazonaws.com" + "url": "https://monitoring.us-east-2.amazonaws.com" } }, "params": { - "UseDualStack": false, "UseFIPS": false, - "Region": "ap-southeast-2" + "UseDualStack": false, + "Region": "us-east-2" } }, { - "documentation": "For region us-iso-east-1 with FIPS enabled and DualStack enabled", + "documentation": "For region us-east-2 with FIPS enabled and DualStack disabled", "expect": { - "error": "FIPS and DualStack are enabled, but this partition does not support one or both" + "endpoint": { + "url": "https://monitoring-fips.us-east-2.amazonaws.com" + } }, "params": { - 
"UseDualStack": true, "UseFIPS": true, - "Region": "us-iso-east-1" + "UseDualStack": false, + "Region": "us-east-2" } }, { - "documentation": "For region us-iso-east-1 with FIPS enabled and DualStack disabled", + "documentation": "For region us-west-1 with FIPS disabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring-fips.us-iso-east-1.c2s.ic.gov" + "url": "https://monitoring.us-west-1.amazonaws.com" } }, "params": { + "UseFIPS": false, "UseDualStack": false, - "UseFIPS": true, - "Region": "us-iso-east-1" + "Region": "us-west-1" } }, { - "documentation": "For region us-iso-east-1 with FIPS disabled and DualStack enabled", + "documentation": "For region us-west-1 with FIPS enabled and DualStack disabled", "expect": { - "error": "DualStack is enabled but this partition does not support DualStack" + "endpoint": { + "url": "https://monitoring-fips.us-west-1.amazonaws.com" + } }, "params": { - "UseDualStack": true, - "UseFIPS": false, - "Region": "us-iso-east-1" + "UseFIPS": true, + "UseDualStack": false, + "Region": "us-west-1" } }, { - "documentation": "For region us-iso-east-1 with FIPS disabled and DualStack disabled", + "documentation": "For region us-west-2 with FIPS disabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring.us-iso-east-1.c2s.ic.gov" + "url": "https://monitoring.us-west-2.amazonaws.com" } }, "params": { - "UseDualStack": false, "UseFIPS": false, - "Region": "us-iso-east-1" + "UseDualStack": false, + "Region": "us-west-2" } }, { - "documentation": "For region ap-southeast-3 with FIPS enabled and DualStack enabled", + "documentation": "For region us-west-2 with FIPS enabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring-fips.ap-southeast-3.api.aws" + "url": "https://monitoring-fips.us-west-2.amazonaws.com" } }, "params": { - "UseDualStack": true, "UseFIPS": true, - "Region": "ap-southeast-3" + "UseDualStack": false, + "Region": "us-west-2" } }, { - "documentation": "For region ap-southeast-3 with FIPS enabled and DualStack disabled", + "documentation": "For region us-east-1 with FIPS enabled and DualStack enabled", "expect": { "endpoint": { - "url": "https://monitoring-fips.ap-southeast-3.amazonaws.com" + "url": "https://monitoring-fips.us-east-1.api.aws" } }, "params": { - "UseDualStack": false, "UseFIPS": true, - "Region": "ap-southeast-3" + "UseDualStack": true, + "Region": "us-east-1" } }, { - "documentation": "For region ap-southeast-3 with FIPS disabled and DualStack enabled", + "documentation": "For region us-east-1 with FIPS disabled and DualStack enabled", "expect": { "endpoint": { - "url": "https://monitoring.ap-southeast-3.api.aws" + "url": "https://monitoring.us-east-1.api.aws" } }, "params": { - "UseDualStack": true, "UseFIPS": false, - "Region": "ap-southeast-3" + "UseDualStack": true, + "Region": "us-east-1" } }, { - "documentation": "For region ap-southeast-3 with FIPS disabled and DualStack disabled", + "documentation": "For region cn-north-1 with FIPS disabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring.ap-southeast-3.amazonaws.com" + "url": "https://monitoring.cn-north-1.amazonaws.com.cn" } }, "params": { - "UseDualStack": false, "UseFIPS": false, - "Region": "ap-southeast-3" + "UseDualStack": false, + "Region": "cn-north-1" } }, { - "documentation": "For region ap-southeast-4 with FIPS enabled and DualStack enabled", + "documentation": "For region cn-northwest-1 with FIPS disabled and DualStack disabled", "expect": { 
"endpoint": { - "url": "https://monitoring-fips.ap-southeast-4.api.aws" + "url": "https://monitoring.cn-northwest-1.amazonaws.com.cn" } }, "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "ap-southeast-4" + "UseFIPS": false, + "UseDualStack": false, + "Region": "cn-northwest-1" } }, { - "documentation": "For region ap-southeast-4 with FIPS enabled and DualStack disabled", + "documentation": "For region cn-north-1 with FIPS enabled and DualStack enabled", "expect": { "endpoint": { - "url": "https://monitoring-fips.ap-southeast-4.amazonaws.com" + "url": "https://monitoring-fips.cn-north-1.api.amazonwebservices.com.cn" } }, "params": { - "UseDualStack": false, "UseFIPS": true, - "Region": "ap-southeast-4" + "UseDualStack": true, + "Region": "cn-north-1" } }, { - "documentation": "For region ap-southeast-4 with FIPS disabled and DualStack enabled", + "documentation": "For region cn-north-1 with FIPS enabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring.ap-southeast-4.api.aws" + "url": "https://monitoring-fips.cn-north-1.amazonaws.com.cn" } }, "params": { - "UseDualStack": true, - "UseFIPS": false, - "Region": "ap-southeast-4" + "UseFIPS": true, + "UseDualStack": false, + "Region": "cn-north-1" } }, { - "documentation": "For region ap-southeast-4 with FIPS disabled and DualStack disabled", + "documentation": "For region cn-north-1 with FIPS disabled and DualStack enabled", "expect": { "endpoint": { - "url": "https://monitoring.ap-southeast-4.amazonaws.com" + "url": "https://monitoring.cn-north-1.api.amazonwebservices.com.cn" } }, "params": { - "UseDualStack": false, "UseFIPS": false, - "Region": "ap-southeast-4" + "UseDualStack": true, + "Region": "cn-north-1" } }, { - "documentation": "For region us-east-1 with FIPS enabled and DualStack enabled", + "documentation": "For region us-gov-east-1 with FIPS disabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring-fips.us-east-1.api.aws" + "url": "https://monitoring.us-gov-east-1.amazonaws.com" } }, "params": { - "UseDualStack": true, - "UseFIPS": true, - "Region": "us-east-1" + "UseFIPS": false, + "UseDualStack": false, + "Region": "us-gov-east-1" } }, { - "documentation": "For region us-east-1 with FIPS enabled and DualStack disabled", + "documentation": "For region us-gov-east-1 with FIPS enabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring-fips.us-east-1.amazonaws.com" + "url": "https://monitoring.us-gov-east-1.amazonaws.com" } }, "params": { - "UseDualStack": false, "UseFIPS": true, - "Region": "us-east-1" + "UseDualStack": false, + "Region": "us-gov-east-1" } }, { - "documentation": "For region us-east-1 with FIPS disabled and DualStack enabled", + "documentation": "For region us-gov-west-1 with FIPS disabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring.us-east-1.api.aws" + "url": "https://monitoring.us-gov-west-1.amazonaws.com" } }, "params": { - "UseDualStack": true, "UseFIPS": false, - "Region": "us-east-1" + "UseDualStack": false, + "Region": "us-gov-west-1" } }, { - "documentation": "For region us-east-1 with FIPS disabled and DualStack disabled", + "documentation": "For region us-gov-west-1 with FIPS enabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring.us-east-1.amazonaws.com" + "url": "https://monitoring.us-gov-west-1.amazonaws.com" } }, "params": { + "UseFIPS": true, "UseDualStack": false, - "UseFIPS": false, - "Region": "us-east-1" + "Region": 
"us-gov-west-1" } }, { - "documentation": "For region us-east-2 with FIPS enabled and DualStack enabled", + "documentation": "For region us-gov-east-1 with FIPS enabled and DualStack enabled", "expect": { "endpoint": { - "url": "https://monitoring-fips.us-east-2.api.aws" + "url": "https://monitoring-fips.us-gov-east-1.api.aws" } }, "params": { - "UseDualStack": true, "UseFIPS": true, - "Region": "us-east-2" + "UseDualStack": true, + "Region": "us-gov-east-1" } }, { - "documentation": "For region us-east-2 with FIPS enabled and DualStack disabled", + "documentation": "For region us-gov-east-1 with FIPS disabled and DualStack enabled", "expect": { "endpoint": { - "url": "https://monitoring-fips.us-east-2.amazonaws.com" + "url": "https://monitoring.us-gov-east-1.api.aws" } }, "params": { - "UseDualStack": false, - "UseFIPS": true, - "Region": "us-east-2" + "UseFIPS": false, + "UseDualStack": true, + "Region": "us-gov-east-1" } }, { - "documentation": "For region us-east-2 with FIPS disabled and DualStack enabled", + "documentation": "For region us-iso-east-1 with FIPS disabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring.us-east-2.api.aws" + "url": "https://monitoring.us-iso-east-1.c2s.ic.gov" } }, "params": { - "UseDualStack": true, "UseFIPS": false, - "Region": "us-east-2" + "UseDualStack": false, + "Region": "us-iso-east-1" } }, { - "documentation": "For region us-east-2 with FIPS disabled and DualStack disabled", + "documentation": "For region us-iso-west-1 with FIPS disabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring.us-east-2.amazonaws.com" + "url": "https://monitoring.us-iso-west-1.c2s.ic.gov" } }, "params": { - "UseDualStack": false, "UseFIPS": false, - "Region": "us-east-2" + "UseDualStack": false, + "Region": "us-iso-west-1" } }, { - "documentation": "For region cn-northwest-1 with FIPS enabled and DualStack enabled", + "documentation": "For region us-iso-east-1 with FIPS enabled and DualStack enabled", "expect": { - "endpoint": { - "url": "https://monitoring-fips.cn-northwest-1.api.amazonwebservices.com.cn" - } + "error": "FIPS and DualStack are enabled, but this partition does not support one or both" }, "params": { - "UseDualStack": true, "UseFIPS": true, - "Region": "cn-northwest-1" + "UseDualStack": true, + "Region": "us-iso-east-1" } }, { - "documentation": "For region cn-northwest-1 with FIPS enabled and DualStack disabled", + "documentation": "For region us-iso-east-1 with FIPS enabled and DualStack disabled", "expect": { "endpoint": { - "url": "https://monitoring-fips.cn-northwest-1.amazonaws.com.cn" + "url": "https://monitoring-fips.us-iso-east-1.c2s.ic.gov" } }, "params": { - "UseDualStack": false, "UseFIPS": true, - "Region": "cn-northwest-1" + "UseDualStack": false, + "Region": "us-iso-east-1" } }, { - "documentation": "For region cn-northwest-1 with FIPS disabled and DualStack enabled", + "documentation": "For region us-iso-east-1 with FIPS disabled and DualStack enabled", "expect": { - "endpoint": { - "url": "https://monitoring.cn-northwest-1.api.amazonwebservices.com.cn" - } + "error": "DualStack is enabled but this partition does not support DualStack" }, "params": { - "UseDualStack": true, "UseFIPS": false, - "Region": "cn-northwest-1" + "UseDualStack": true, + "Region": "us-iso-east-1" } }, { - "documentation": "For region cn-northwest-1 with FIPS disabled and DualStack disabled", + "documentation": "For region us-isob-east-1 with FIPS disabled and DualStack disabled", "expect": { "endpoint": 
{ - "url": "https://monitoring.cn-northwest-1.amazonaws.com.cn" + "url": "https://monitoring.us-isob-east-1.sc2s.sgov.gov" } }, "params": { - "UseDualStack": false, "UseFIPS": false, - "Region": "cn-northwest-1" + "UseDualStack": false, + "Region": "us-isob-east-1" } }, { @@ -1766,8 +587,8 @@ "error": "FIPS and DualStack are enabled, but this partition does not support one or both" }, "params": { - "UseDualStack": true, "UseFIPS": true, + "UseDualStack": true, "Region": "us-isob-east-1" } }, @@ -1779,8 +600,8 @@ } }, "params": { - "UseDualStack": false, "UseFIPS": true, + "UseDualStack": false, "Region": "us-isob-east-1" } }, @@ -1790,35 +611,35 @@ "error": "DualStack is enabled but this partition does not support DualStack" }, "params": { - "UseDualStack": true, "UseFIPS": false, + "UseDualStack": true, "Region": "us-isob-east-1" } }, { - "documentation": "For region us-isob-east-1 with FIPS disabled and DualStack disabled", + "documentation": "For custom endpoint with region set and fips disabled and dualstack disabled", "expect": { "endpoint": { - "url": "https://monitoring.us-isob-east-1.sc2s.sgov.gov" + "url": "https://example.com" } }, "params": { - "UseDualStack": false, "UseFIPS": false, - "Region": "us-isob-east-1" + "UseDualStack": false, + "Region": "us-east-1", + "Endpoint": "https://example.com" } }, { - "documentation": "For custom endpoint with fips disabled and dualstack disabled", + "documentation": "For custom endpoint with region not set and fips disabled and dualstack disabled", "expect": { "endpoint": { "url": "https://example.com" } }, "params": { - "UseDualStack": false, "UseFIPS": false, - "Region": "us-east-1", + "UseDualStack": false, "Endpoint": "https://example.com" } }, @@ -1828,8 +649,8 @@ "error": "Invalid Configuration: FIPS and custom endpoint are not supported" }, "params": { - "UseDualStack": false, "UseFIPS": true, + "UseDualStack": false, "Region": "us-east-1", "Endpoint": "https://example.com" } @@ -1840,11 +661,17 @@ "error": "Invalid Configuration: Dualstack and custom endpoint are not supported" }, "params": { - "UseDualStack": true, "UseFIPS": false, + "UseDualStack": true, "Region": "us-east-1", "Endpoint": "https://example.com" } + }, + { + "documentation": "Missing region", + "expect": { + "error": "Invalid Configuration: Missing Region" + } } ], "version": "1.0" diff --git a/models/apis/rds/2014-10-31/api-2.json b/models/apis/rds/2014-10-31/api-2.json index 4f732b370e9..490e592dcfd 100644 --- a/models/apis/rds/2014-10-31/api-2.json +++ b/models/apis/rds/2014-10-31/api-2.json @@ -270,7 +270,8 @@ {"shape":"CustomDBEngineVersionAlreadyExistsFault"}, {"shape":"CustomDBEngineVersionQuotaExceededFault"}, {"shape":"Ec2ImagePropertiesNotSupportedFault"}, - {"shape":"KMSKeyNotAccessibleFault"} + {"shape":"KMSKeyNotAccessibleFault"}, + {"shape":"CreateCustomDBEngineVersionFault"} ] }, "CreateDBCluster":{ @@ -3083,6 +3084,17 @@ "BlueGreenDeployment":{"shape":"BlueGreenDeployment"} } }, + "CreateCustomDBEngineVersionFault":{ + "type":"structure", + "members":{ + }, + "error":{ + "code":"CreateCustomDBEngineVersionFault", + "httpStatusCode":400, + "senderFault":true + }, + "exception":true + }, "CreateCustomDBEngineVersionMessage":{ "type":"structure", "required":[ diff --git a/models/apis/rds/2014-10-31/docs-2.json b/models/apis/rds/2014-10-31/docs-2.json index 6692b47fa30..49e1155cdbf 100644 --- a/models/apis/rds/2014-10-31/docs-2.json +++ b/models/apis/rds/2014-10-31/docs-2.json @@ -17,7 +17,7 @@ "CopyOptionGroup": "

Copies the specified option group.

", "CreateBlueGreenDeployment": "

Creates a blue/green deployment.

A blue/green deployment creates a staging environment that copies the production environment. In a blue/green deployment, the blue environment is the current production environment. The green environment is the staging environment. The staging environment stays in sync with the current production environment using logical replication.

You can make changes to the databases in the green environment without affecting production workloads. For example, you can upgrade the major or minor DB engine version, change database parameters, or make schema changes in the staging environment. You can thoroughly test changes in the green environment. When ready, you can switch over the environments to promote the green environment to be the new production environment. The switchover typically takes under a minute.

For more information, see Using Amazon RDS Blue/Green Deployments for database updates in the Amazon RDS User Guide and Using Amazon RDS Blue/Green Deployments for database updates in the Amazon Aurora User Guide.
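For illustration only (this sketch is not part of the patch), creating a blue/green deployment with aws-sdk-go v1 might look like the following; the deployment name, source ARN, and target engine version are hypothetical placeholders:

package main

import (
	"fmt"
	"log"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/rds"
)

func main() {
	svc := rds.New(session.Must(session.NewSession()))

	out, err := svc.CreateBlueGreenDeployment(&rds.CreateBlueGreenDeploymentInput{
		BlueGreenDeploymentName: aws.String("my-bg-deployment"),                                 // placeholder name
		Source:                  aws.String("arn:aws:rds:us-east-1:123456789012:db:my-prod-db"), // placeholder ARN
		TargetEngineVersion:     aws.String("8.0.32"),                                           // version to test in the green environment
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("deployment status:", aws.StringValue(out.BlueGreenDeployment.Status))
}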

", "CreateCustomDBEngineVersion": "

Creates a custom DB engine version (CEV).

", - "CreateDBCluster": "

Creates a new Amazon Aurora DB cluster or Multi-AZ DB cluster.

You can use the ReplicationSourceIdentifier parameter to create an Amazon Aurora DB cluster as a read replica of another DB cluster or Amazon RDS MySQL or PostgreSQL DB instance. For more information about Amazon Aurora, see What is Amazon Aurora? in the Amazon Aurora User Guide.

You can also use the ReplicationSourceIdentifier parameter to create a Multi-AZ DB cluster read replica with an RDS for PostgreSQL DB instance as the source. For more information about Multi-AZ DB clusters, see Multi-AZ DB cluster deployments in the Amazon RDS User Guide.

", + "CreateDBCluster": "

Creates a new Amazon Aurora DB cluster or Multi-AZ DB cluster.

If you create an Aurora DB cluster, the request creates an empty cluster. You must explicitly create the writer instance for your DB cluster using the CreateDBInstance operation. If you create a Multi-AZ DB cluster, the request creates a writer and two reader DB instances for you, each in a different Availability Zone.

You can use the ReplicationSourceIdentifier parameter to create an Amazon Aurora DB cluster as a read replica of another DB cluster or Amazon RDS MySQL or PostgreSQL DB instance. For more information about Amazon Aurora, see What is Amazon Aurora? in the Amazon Aurora User Guide.

You can also use the ReplicationSourceIdentifier parameter to create a Multi-AZ DB cluster read replica with an RDS for PostgreSQL DB instance as the source. For more information about Multi-AZ DB clusters, see Multi-AZ DB cluster deployments in the Amazon RDS User Guide.
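A minimal aws-sdk-go v1 sketch of the two-step Aurora flow described above (create the empty cluster, then explicitly create its writer instance); all identifiers and credentials are placeholders:

package main

import (
	"log"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/rds"
)

func main() {
	svc := rds.New(session.Must(session.NewSession()))

	// 1. Create the (empty) Aurora cluster.
	_, err := svc.CreateDBCluster(&rds.CreateDBClusterInput{
		DBClusterIdentifier: aws.String("my-cluster1"),
		Engine:              aws.String("aurora-mysql"),
		MasterUsername:      aws.String("admin"),
		MasterUserPassword:  aws.String("choose-a-real-password"),
	})
	if err != nil {
		log.Fatal(err)
	}

	// 2. Explicitly create the writer instance for the cluster.
	_, err = svc.CreateDBInstance(&rds.CreateDBInstanceInput{
		DBInstanceIdentifier: aws.String("my-cluster1-writer"),
		DBClusterIdentifier:  aws.String("my-cluster1"),
		Engine:               aws.String("aurora-mysql"),
		DBInstanceClass:      aws.String("db.r6g.large"),
	})
	if err != nil {
		log.Fatal(err)
	}
}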

", "CreateDBClusterEndpoint": "

Creates a new custom endpoint and associates it with an Amazon Aurora DB cluster.

This action applies only to Aurora DB clusters.

", "CreateDBClusterParameterGroup": "

Creates a new DB cluster parameter group.

Parameters in a DB cluster parameter group apply to all of the instances in a DB cluster.

A DB cluster parameter group is initially created with the default parameters for the database engine used by instances in the DB cluster. To provide custom values for any of the parameters, you must modify the group after creating it using ModifyDBClusterParameterGroup. Once you've created a DB cluster parameter group, you need to associate it with your DB cluster using ModifyDBCluster.

When you associate a new DB cluster parameter group with a running Aurora DB cluster, reboot the DB instances in the DB cluster without failover for the new DB cluster parameter group and associated settings to take effect.

When you associate a new DB cluster parameter group with a running Multi-AZ DB cluster, reboot the DB cluster without failover for the new DB cluster parameter group and associated settings to take effect.

After you create a DB cluster parameter group, you should wait at least 5 minutes before creating your first DB cluster that uses that DB cluster parameter group as the default parameter group. This allows Amazon RDS to fully complete the create action before the DB cluster parameter group is used as the default for a new DB cluster. This is especially important for parameters that are critical when creating the default database for a DB cluster, such as the character set for the default database defined by the character_set_database parameter. You can use the Parameter Groups option of the Amazon RDS console or the DescribeDBClusterParameters operation to verify that your DB cluster parameter group has been created or modified.

For more information on Amazon Aurora, see What is Amazon Aurora? in the Amazon Aurora User Guide.

For more information on Multi-AZ DB clusters, see Multi-AZ DB cluster deployments in the Amazon RDS User Guide.
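As a hedged aws-sdk-go v1 sketch of the workflow above (create the group, customize it with ModifyDBClusterParameterGroup, then attach it with ModifyDBCluster); the names and parameter values are placeholders:

package main

import (
	"log"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/rds"
)

func main() {
	svc := rds.New(session.Must(session.NewSession()))

	// 1. The group starts out with the engine defaults.
	_, err := svc.CreateDBClusterParameterGroup(&rds.CreateDBClusterParameterGroupInput{
		DBClusterParameterGroupName: aws.String("my-cluster-params"),
		DBParameterGroupFamily:      aws.String("aurora-mysql8.0"),
		Description:                 aws.String("custom cluster parameters"),
	})
	if err != nil {
		log.Fatal(err)
	}

	// 2. Override individual parameters after creating the group.
	_, err = svc.ModifyDBClusterParameterGroup(&rds.ModifyDBClusterParameterGroupInput{
		DBClusterParameterGroupName: aws.String("my-cluster-params"),
		Parameters: []*rds.Parameter{{
			ParameterName:  aws.String("character_set_database"),
			ParameterValue: aws.String("utf8mb4"),
			ApplyMethod:    aws.String("pending-reboot"),
		}},
	})
	if err != nil {
		log.Fatal(err)
	}

	// 3. Associate the group with an existing cluster. Per the note above, wait
	// at least 5 minutes before using the group as the default for a new cluster.
	_, err = svc.ModifyDBCluster(&rds.ModifyDBClusterInput{
		DBClusterIdentifier:         aws.String("my-cluster1"),
		DBClusterParameterGroupName: aws.String("my-cluster-params"),
	})
	if err != nil {
		log.Fatal(err)
	}
}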

", "CreateDBClusterSnapshot": "

Creates a snapshot of a DB cluster.

For more information on Amazon Aurora, see What is Amazon Aurora? in the Amazon Aurora User Guide.

For more information on Multi-AZ DB clusters, see Multi-AZ DB cluster deployments in the Amazon RDS User Guide.

", @@ -136,7 +136,7 @@ "StartDBCluster": "

Starts an Amazon Aurora DB cluster that was stopped using the Amazon Web Services console, the stop-db-cluster CLI command, or the StopDBCluster action.

For more information, see Stopping and Starting an Aurora Cluster in the Amazon Aurora User Guide.

This action only applies to Aurora DB clusters.

", "StartDBInstance": "

Starts an Amazon RDS DB instance that was stopped using the Amazon Web Services console, the stop-db-instance CLI command, or the StopDBInstance action.

For more information, see Starting an Amazon RDS DB instance That Was Previously Stopped in the Amazon RDS User Guide.

This command doesn't apply to RDS Custom, Aurora MySQL, and Aurora PostgreSQL. For Aurora DB clusters, use StartDBCluster instead.

", "StartDBInstanceAutomatedBackupsReplication": "

Enables replication of automated backups to a different Amazon Web Services Region.

This command doesn't apply to RDS Custom.

For more information, see Replicating Automated Backups to Another Amazon Web Services Region in the Amazon RDS User Guide.

", - "StartExportTask": "

Starts an export of DB snapshot or DB cluster data to Amazon S3. The provided IAM role must have access to the S3 bucket.

You can't export snapshot data from RDS Custom DB instances.

You can't export cluster data from Multi-AZ DB clusters.

For more information on exporting DB snapshot data, see Exporting DB snapshot data to Amazon S3 in the Amazon RDS User Guide or Exporting DB cluster snapshot data to Amazon S3 in the Amazon Aurora User Guide.

For more information on exporting DB cluster data, see Exporting DB cluster data to Amazon S3 in the Amazon Aurora User Guide.

", + "StartExportTask": "

Starts an export of DB snapshot or DB cluster data to Amazon S3. The provided IAM role must have access to the S3 bucket.

You can't export snapshot data from RDS Custom DB instances.

You can't export cluster data from Multi-AZ DB clusters.

For more information on exporting DB snapshot data, see Exporting DB snapshot data to Amazon S3 in the Amazon RDS User Guide or Exporting DB cluster snapshot data to Amazon S3 in the Amazon Aurora User Guide.

For more information on exporting DB cluster data, see Exporting DB cluster data to Amazon S3 in the Amazon Aurora User Guide.

", "StopActivityStream": "

Stops a database activity stream that was started using the Amazon Web Services console, the start-activity-stream CLI command, or the StartActivityStream action.

For more information, see Monitoring Amazon Aurora with Database Activity Streams in the Amazon Aurora User Guide or Monitoring Amazon RDS with Database Activity Streams in the Amazon RDS User Guide.

", "StopDBCluster": "

Stops an Amazon Aurora DB cluster. When you stop a DB cluster, Aurora retains the DB cluster's metadata, including its endpoints and DB parameter groups. Aurora also retains the transaction logs so you can do a point-in-time restore if necessary.

For more information, see Stopping and Starting an Aurora Cluster in the Amazon Aurora User Guide.

This action only applies to Aurora DB clusters.
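As a rough sketch in aws-sdk-go v1, stopping and later restarting a cluster pairs StopDBCluster with StartDBCluster; the cluster identifier is a placeholder:

package main

import (
	"log"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/rds"
)

func main() {
	svc := rds.New(session.Must(session.NewSession()))

	// Stop the cluster; metadata, endpoints, and transaction logs are retained.
	if _, err := svc.StopDBCluster(&rds.StopDBClusterInput{
		DBClusterIdentifier: aws.String("my-cluster1"), // placeholder identifier
	}); err != nil {
		log.Fatal(err)
	}

	// Later, start it again with the matching call.
	if _, err := svc.StartDBCluster(&rds.StartDBClusterInput{
		DBClusterIdentifier: aws.String("my-cluster1"),
	}); err != nil {
		log.Fatal(err)
	}
}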

", "StopDBInstance": "

Stops an Amazon RDS DB instance. When you stop a DB instance, Amazon RDS retains the DB instance's metadata, including its endpoint, DB parameter group, and option group membership. Amazon RDS also retains the transaction logs so you can do a point-in-time restore if necessary.

For more information, see Stopping an Amazon RDS DB Instance Temporarily in the Amazon RDS User Guide.

This command doesn't apply to RDS Custom, Aurora MySQL, and Aurora PostgreSQL. For Aurora clusters, use StopDBCluster instead.

", @@ -803,6 +803,11 @@ "refs": { } }, + "CreateCustomDBEngineVersionFault": { + "base": "

An error occurred while trying to create the CEV.
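For illustration, a sketch of how a caller might detect this new fault with aws-sdk-go v1, using the ErrCodeCreateCustomDBEngineVersionFault constant this release adds to service/rds/errors.go; the engine, version, bucket, key, and manifest values are hypothetical placeholders:

package main

import (
	"log"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/awserr"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/rds"
)

func main() {
	svc := rds.New(session.Must(session.NewSession()))

	_, err := svc.CreateCustomDBEngineVersion(&rds.CreateCustomDBEngineVersionInput{
		Engine:                                aws.String("custom-oracle-ee"), // placeholder custom engine
		EngineVersion:                         aws.String("19.my_cev1"),
		DatabaseInstallationFilesS3BucketName: aws.String("my-installation-files"),
		KMSKeyId:                              aws.String("arn:aws:kms:us-east-1:123456789012:key/abcd-ef"),
		Manifest:                              aws.String(`{"mediaImportTemplateVersion": "2020-08-14"}`),
	})
	if aerr, ok := err.(awserr.Error); ok &&
		aerr.Code() == rds.ErrCodeCreateCustomDBEngineVersionFault {
		// The create request itself failed; the message carries the details.
		log.Fatalf("CEV creation failed: %s", aerr.Message())
	}
	if err != nil {
		log.Fatal(err)
	}
}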

", + "refs": { + } + }, "CreateCustomDBEngineVersionMessage": { "base": null, "refs": { @@ -2510,7 +2515,7 @@ "GlobalClusterMemberList": { "base": null, "refs": { - "GlobalCluster$GlobalClusterMembers": "

The list of cluster IDs for secondary clusters within the global database cluster. Currently limited to 1 item.

" + "GlobalCluster$GlobalClusterMembers": "

The list of primary and secondary clusters within the global database cluster.

" } }, "GlobalClusterNotFoundFault": { @@ -2624,7 +2629,7 @@ "CreateDBClusterMessage$BackupRetentionPeriod": "

The number of days for which automated backups are retained.

Default: 1

Constraints:

Valid for: Aurora DB clusters and Multi-AZ DB clusters

", "CreateDBClusterMessage$Port": "

The port number on which the instances in the DB cluster accept connections.

RDS for MySQL and Aurora MySQL

Default: 3306

Valid values: 1150-65535

RDS for PostgreSQL and Aurora PostgreSQL

Default: 5432

Valid values: 1150-65535

Valid for: Aurora DB clusters and Multi-AZ DB clusters

", "CreateDBClusterMessage$AllocatedStorage": "

The amount of storage in gibibytes (GiB) to allocate to each DB instance in the Multi-AZ DB cluster.

This setting is required to create a Multi-AZ DB cluster.

Valid for: Multi-AZ DB clusters only

", - "CreateDBClusterMessage$Iops": "

The amount of Provisioned IOPS (input/output operations per second) to be initially allocated for each DB instance in the Multi-AZ DB cluster.

For information about valid IOPS values, see Amazon RDS Provisioned IOPS storage in the Amazon RDS User Guide.

This setting is required to create a Multi-AZ DB cluster.

Constraints: Must be a multiple between .5 and 50 of the storage amount for the DB cluster.

Valid for: Multi-AZ DB clusters only

", + "CreateDBClusterMessage$Iops": "

The amount of Provisioned IOPS (input/output operations per second) to be initially allocated for each DB instance in the Multi-AZ DB cluster.

For information about valid IOPS values, see Provisioned IOPS storage in the Amazon RDS User Guide.

This setting is required to create a Multi-AZ DB cluster.

Constraints: Must be a multiple between .5 and 50 of the storage amount for the DB cluster.

Valid for: Multi-AZ DB clusters only

", "CreateDBClusterMessage$MonitoringInterval": "

The interval, in seconds, between points when Enhanced Monitoring metrics are collected for the DB cluster. To turn off collecting Enhanced Monitoring metrics, specify 0. The default is 0.

If MonitoringRoleArn is specified, also set MonitoringInterval to a value other than 0.

Valid Values: 0, 1, 5, 10, 15, 30, 60

Valid for: Multi-AZ DB clusters only

", "CreateDBClusterMessage$PerformanceInsightsRetentionPeriod": "

The number of days to retain Performance Insights data. The default is 7 days. The following values are valid:

For example, the following values are valid:

If you specify a retention period such as 94, which isn't a valid value, RDS issues an error.

Valid for: Multi-AZ DB clusters only

", "CreateDBInstanceMessage$AllocatedStorage": "

The amount of storage in gibibytes (GiB) to allocate for the DB instance.

Type: Integer

Amazon Aurora

Not applicable. Aurora cluster volumes automatically grow as the amount of data in your database increases, though you are only charged for the space that you use in an Aurora cluster volume.

Amazon RDS Custom

Constraints to the amount of storage for each storage type are the following:

MySQL

Constraints to the amount of storage for each storage type are the following:

MariaDB

Constraints to the amount of storage for each storage type are the following:

PostgreSQL

Constraints to the amount of storage for each storage type are the following:

Oracle

Constraints to the amount of storage for each storage type are the following:

SQL Server

Constraints to the amount of storage for each storage type are the following:

", @@ -2930,7 +2935,7 @@ "DBInstance$EnabledCloudwatchLogsExports": "

A list of log types that this DB instance is configured to export to CloudWatch Logs.

Log types vary by DB engine. For information about the log types for each DB engine, see Amazon RDS Database Log Files in the Amazon RDS User Guide.

", "PendingCloudwatchLogsExports$LogTypesToEnable": "

Log types that are in the process of being enabled. After they are enabled, these log types are exported to CloudWatch Logs.

", "PendingCloudwatchLogsExports$LogTypesToDisable": "

Log types that are in the process of being deactivated. After they are deactivated, these log types aren't exported to CloudWatch Logs.

", - "RestoreDBClusterFromS3Message$EnableCloudwatchLogsExports": "

The list of logs that the restored DB cluster is to export to CloudWatch Logs. The values in the list depend on the DB engine being used.

Aurora MySQL

Possible values are audit, error, general, and slowquery.

Aurora PostgreSQL

Possible value is postgresql.

For more information about exporting CloudWatch Logs for Amazon Aurora, see Publishing Database Logs to Amazon CloudWatch Logs in the Amazon Aurora User Guide.

", + "RestoreDBClusterFromS3Message$EnableCloudwatchLogsExports": "

The list of logs that the restored DB cluster is to export to CloudWatch Logs. The values in the list depend on the DB engine being used.

Aurora MySQL

Possible values are audit, error, general, and slowquery.

For more information about exporting CloudWatch Logs for Amazon Aurora, see Publishing Database Logs to Amazon CloudWatch Logs in the Amazon Aurora User Guide.

", "RestoreDBClusterFromSnapshotMessage$EnableCloudwatchLogsExports": "

The list of logs that the restored DB cluster is to export to Amazon CloudWatch Logs. The values in the list depend on the DB engine being used.

RDS for MySQL

Possible values are error, general, and slowquery.

RDS for PostgreSQL

Possible values are postgresql and upgrade.

Aurora MySQL

Possible values are audit, error, general, and slowquery.

Aurora PostgreSQL

Possible value is postgresql.

For more information about exporting CloudWatch Logs for Amazon RDS, see Publishing Database Logs to Amazon CloudWatch Logs in the Amazon RDS User Guide.

For more information about exporting CloudWatch Logs for Amazon Aurora, see Publishing Database Logs to Amazon CloudWatch Logs in the Amazon Aurora User Guide.

Valid for: Aurora DB clusters and Multi-AZ DB clusters

", "RestoreDBClusterToPointInTimeMessage$EnableCloudwatchLogsExports": "

The list of logs that the restored DB cluster is to export to CloudWatch Logs. The values in the list depend on the DB engine being used.

RDS for MySQL

Possible values are error, general, and slowquery.

RDS for PostgreSQL

Possible values are postgresql and upgrade.

Aurora MySQL

Possible values are audit, error, general, and slowquery.

Aurora PostgreSQL

Possible value is postgresql.

For more information about exporting CloudWatch Logs for Amazon RDS, see Publishing Database Logs to Amazon CloudWatch Logs in the Amazon RDS User Guide.

For more information about exporting CloudWatch Logs for Amazon Aurora, see Publishing Database Logs to Amazon CloudWatch Logs in the Amazon Aurora User Guide.

Valid for: Aurora DB clusters and Multi-AZ DB clusters

", "RestoreDBInstanceFromDBSnapshotMessage$EnableCloudwatchLogsExports": "

The list of logs that the restored DB instance is to export to CloudWatch Logs. The values in the list depend on the DB engine being used. For more information, see Publishing Database Logs to Amazon CloudWatch Logs in the Amazon RDS User Guide.

This setting doesn't apply to RDS Custom.

", @@ -3992,8 +3997,8 @@ "CreateDBClusterMessage$DBClusterIdentifier": "

The DB cluster identifier. This parameter is stored as a lowercase string.

Constraints:

Example: my-cluster1

Valid for: Aurora DB clusters and Multi-AZ DB clusters

", "CreateDBClusterMessage$DBClusterParameterGroupName": "

The name of the DB cluster parameter group to associate with this DB cluster. If you do not specify a value, then the default DB cluster parameter group for the specified DB engine and version is used.

Constraints:

Valid for: Aurora DB clusters and Multi-AZ DB clusters

", "CreateDBClusterMessage$DBSubnetGroupName": "

A DB subnet group to associate with this DB cluster.

This setting is required to create a Multi-AZ DB cluster.

Constraints: Must match the name of an existing DBSubnetGroup. Must not be default.

Example: mydbsubnetgroup

Valid for: Aurora DB clusters and Multi-AZ DB clusters

", - "CreateDBClusterMessage$Engine": "

The name of the database engine to be used for this DB cluster.

Valid Values:

Valid for: Aurora DB clusters and Multi-AZ DB clusters

", - "CreateDBClusterMessage$EngineVersion": "

The version number of the database engine to use.

To list all of the available engine versions for MySQL 5.6-compatible Aurora, use the following command:

aws rds describe-db-engine-versions --engine aurora --query \"DBEngineVersions[].EngineVersion\"

To list all of the available engine versions for MySQL 5.7-compatible and MySQL 8.0-compatible Aurora, use the following command:

aws rds describe-db-engine-versions --engine aurora-mysql --query \"DBEngineVersions[].EngineVersion\"

To list all of the available engine versions for Aurora PostgreSQL, use the following command:

aws rds describe-db-engine-versions --engine aurora-postgresql --query \"DBEngineVersions[].EngineVersion\"

To list all of the available engine versions for RDS for MySQL, use the following command:

aws rds describe-db-engine-versions --engine mysql --query \"DBEngineVersions[].EngineVersion\"

To list all of the available engine versions for RDS for PostgreSQL, use the following command:

aws rds describe-db-engine-versions --engine postgres --query \"DBEngineVersions[].EngineVersion\"

Aurora MySQL

For information, see MySQL on Amazon RDS Versions in the Amazon Aurora User Guide.

Aurora PostgreSQL

For information, see Amazon Aurora PostgreSQL releases and engine versions in the Amazon Aurora User Guide.

MySQL

For information, see MySQL on Amazon RDS Versions in the Amazon RDS User Guide.

PostgreSQL

For information, see Amazon RDS for PostgreSQL versions and extensions in the Amazon RDS User Guide.

Valid for: Aurora DB clusters and Multi-AZ DB clusters

", + "CreateDBClusterMessage$Engine": "

The name of the database engine to be used for this DB cluster.

Valid Values:

Valid for: Aurora DB clusters and Multi-AZ DB clusters

", + "CreateDBClusterMessage$EngineVersion": "

The version number of the database engine to use.

To list all of the available engine versions for Aurora MySQL version 2 (5.7-compatible) and version 3 (MySQL 8.0-compatible), use the following command:

aws rds describe-db-engine-versions --engine aurora-mysql --query \"DBEngineVersions[].EngineVersion\"

You can supply either 5.7 or 8.0 to use the default engine version for Aurora MySQL version 2 or version 3, respectively.

To list all of the available engine versions for Aurora PostgreSQL, use the following command:

aws rds describe-db-engine-versions --engine aurora-postgresql --query \"DBEngineVersions[].EngineVersion\"

To list all of the available engine versions for RDS for MySQL, use the following command:

aws rds describe-db-engine-versions --engine mysql --query \"DBEngineVersions[].EngineVersion\"

To list all of the available engine versions for RDS for PostgreSQL, use the following command:

aws rds describe-db-engine-versions --engine postgres --query \"DBEngineVersions[].EngineVersion\"

Aurora MySQL

For information, see Database engine updates for Amazon Aurora MySQL in the Amazon Aurora User Guide.

Aurora PostgreSQL

For information, see Amazon Aurora PostgreSQL releases and engine versions in the Amazon Aurora User Guide.

MySQL

For information, see Amazon RDS for MySQL in the Amazon RDS User Guide.

PostgreSQL

For information, see Amazon RDS for PostgreSQL in the Amazon RDS User Guide.

Valid for: Aurora DB clusters and Multi-AZ DB clusters
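The SDK-side equivalent of the describe-db-engine-versions CLI commands above, sketched with aws-sdk-go v1 (paging through all aurora-mysql engine versions):

package main

import (
	"fmt"
	"log"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/rds"
)

func main() {
	svc := rds.New(session.Must(session.NewSession()))

	// Page through every available aurora-mysql engine version and print it.
	err := svc.DescribeDBEngineVersionsPages(
		&rds.DescribeDBEngineVersionsInput{Engine: aws.String("aurora-mysql")},
		func(page *rds.DescribeDBEngineVersionsOutput, lastPage bool) bool {
			for _, v := range page.DBEngineVersions {
				fmt.Println(aws.StringValue(v.EngineVersion))
			}
			return true // keep paging
		})
	if err != nil {
		log.Fatal(err)
	}
}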

", "CreateDBClusterMessage$MasterUsername": "

The name of the master user for the DB cluster.

Constraints:

Valid for: Aurora DB clusters and Multi-AZ DB clusters

", "CreateDBClusterMessage$MasterUserPassword": "

The password for the master database user. This password can contain any printable ASCII character except \"/\", \"\"\", or \"@\".

Constraints:

Valid for: Aurora DB clusters and Multi-AZ DB clusters

", "CreateDBClusterMessage$OptionGroupName": "

A value that indicates that the DB cluster should be associated with the specified option group.

DB clusters are associated with a default option group that can't be modified.

", @@ -4002,7 +4007,7 @@ "CreateDBClusterMessage$ReplicationSourceIdentifier": "

The Amazon Resource Name (ARN) of the source DB instance or DB cluster if this DB cluster is created as a read replica.

Valid for: Aurora DB clusters and RDS for PostgreSQL Multi-AZ DB clusters

", "CreateDBClusterMessage$KmsKeyId": "

The Amazon Web Services KMS key identifier for an encrypted DB cluster.

The Amazon Web Services KMS key identifier is the key ARN, key ID, alias ARN, or alias name for the KMS key. To use a KMS key in a different Amazon Web Services account, specify the key ARN or alias ARN.

When a KMS key isn't specified in KmsKeyId:

There is a default KMS key for your Amazon Web Services account. Your Amazon Web Services account has a different default KMS key for each Amazon Web Services Region.

If you create a read replica of an encrypted DB cluster in another Amazon Web Services Region, you must set KmsKeyId to a KMS key identifier that is valid in the destination Amazon Web Services Region. This KMS key is used to encrypt the read replica in that Amazon Web Services Region.

Valid for: Aurora DB clusters and Multi-AZ DB clusters

", "CreateDBClusterMessage$PreSignedUrl": "

When you are replicating a DB cluster from one Amazon Web Services GovCloud (US) Region to another, specify a URL that contains a Signature Version 4 signed request for the CreateDBCluster operation to be called in the source Amazon Web Services Region where the DB cluster is replicated from. Specify PreSignedUrl only when you are performing cross-Region replication from an encrypted DB cluster.

The presigned URL must be a valid request for the CreateDBCluster API operation that can run in the source Amazon Web Services Region that contains the encrypted DB cluster to copy.

The presigned URL request must contain the following parameter values:

To learn how to generate a Signature Version 4 signed request, see Authenticating Requests: Using Query Parameters (Amazon Web Services Signature Version 4) and Signature Version 4 Signing Process.

If you are using an Amazon Web Services SDK tool or the CLI, you can specify SourceRegion (or --source-region for the CLI) instead of specifying PreSignedUrl manually. Specifying SourceRegion autogenerates a presigned URL that is a valid request for the operation that can run in the source Amazon Web Services Region.

Valid for: Aurora DB clusters only
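A sketch of the SourceRegion shortcut mentioned above in aws-sdk-go v1, assuming the SDK's documented auto-presigning when SourceRegion is set; the Region names, ARNs, and key alias are placeholders:

package main

import (
	"log"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/rds"
)

func main() {
	// Client in the destination Region.
	svc := rds.New(session.Must(session.NewSession(aws.NewConfig().WithRegion("us-gov-east-1"))))

	_, err := svc.CreateDBCluster(&rds.CreateDBClusterInput{
		DBClusterIdentifier:         aws.String("my-replica-cluster"),
		Engine:                      aws.String("aurora-mysql"),
		ReplicationSourceIdentifier: aws.String("arn:aws-us-gov:rds:us-gov-west-1:123456789012:cluster:my-source"),
		KmsKeyId:                    aws.String("alias/my-destination-key"),
		// With SourceRegion set, the SDK autogenerates the presigned URL,
		// so PreSignedUrl does not need to be built by hand.
		SourceRegion: aws.String("us-gov-west-1"),
	})
	if err != nil {
		log.Fatal(err)
	}
}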

", - "CreateDBClusterMessage$EngineMode": "

The DB engine mode of the DB cluster, either provisioned, serverless, parallelquery, global, or multimaster.

The parallelquery engine mode isn't required for Aurora MySQL version 1.23 and higher 1.x versions, and version 2.09 and higher 2.x versions.

The global engine mode isn't required for Aurora MySQL version 1.22 and higher 1.x versions, and global engine mode isn't required for any 2.x versions.

The multimaster engine mode only applies for DB clusters created with Aurora MySQL version 5.6.10a.

The serverless engine mode only applies for Aurora Serverless v1 DB clusters.

For Aurora PostgreSQL, the global engine mode isn't required, and both the parallelquery and the multimaster engine modes currently aren't supported.

Limitations and requirements apply to some DB engine modes. For more information, see the following sections in the Amazon Aurora User Guide:

Valid for: Aurora DB clusters only

", + "CreateDBClusterMessage$EngineMode": "

The DB engine mode of the DB cluster, either provisioned or serverless.

The serverless engine mode only applies for Aurora Serverless v1 DB clusters.

Limitations and requirements apply to some DB engine modes. For more information, see the following sections in the Amazon Aurora User Guide:

Valid for: Aurora DB clusters only

", "CreateDBClusterMessage$GlobalClusterIdentifier": "

The global cluster ID of an Aurora cluster that becomes the primary cluster in the new global database cluster.

Valid for: Aurora DB clusters only

", "CreateDBClusterMessage$Domain": "

The Active Directory directory ID to create the DB cluster in.

For Amazon Aurora DB clusters, Amazon RDS can use Kerberos authentication to authenticate users that connect to the DB cluster.

For more information, see Kerberos authentication in the Amazon Aurora User Guide.

Valid for: Aurora DB clusters only

", "CreateDBClusterMessage$DomainIAMRoleName": "

Specify the name of the IAM role to be used when making API calls to the Directory Service.

Valid for: Aurora DB clusters only

", @@ -4346,7 +4351,7 @@ "DescribeDBClusterSnapshotsMessage$Marker": "

An optional pagination token provided by a previous DescribeDBClusterSnapshots request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by MaxRecords.

", "DescribeDBClustersMessage$DBClusterIdentifier": "

The user-supplied DB cluster identifier or the Amazon Resource Name (ARN) of the DB cluster. If this parameter is specified, information from only the specific DB cluster is returned. This parameter isn't case-sensitive.

Constraints:

", "DescribeDBClustersMessage$Marker": "

An optional pagination token provided by a previous DescribeDBClusters request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by MaxRecords.

", - "DescribeDBEngineVersionsMessage$Engine": "

The database engine to return.

Valid Values:

", + "DescribeDBEngineVersionsMessage$Engine": "

The database engine to return.

Valid Values:

", "DescribeDBEngineVersionsMessage$EngineVersion": "

The database engine version to return.

Example: 5.1.49

", "DescribeDBEngineVersionsMessage$DBParameterGroupFamily": "

The name of a specific DB parameter group family to return details for.

Constraints:

", "DescribeDBEngineVersionsMessage$Marker": "

An optional pagination token provided by a previous request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by MaxRecords.

", @@ -4410,7 +4415,7 @@ "DescribeOptionGroupsMessage$Marker": "

An optional pagination token provided by a previous DescribeOptionGroups request. If this parameter is specified, the response includes only records beyond the marker, up to the value specified by MaxRecords.

", "DescribeOptionGroupsMessage$EngineName": "

Filters the list of option groups to only include groups associated with a specific database engine.

Valid Values:

", "DescribeOptionGroupsMessage$MajorEngineVersion": "

Filters the list of option groups to only include groups associated with a specific database engine version. If specified, then EngineName must also be specified.

", - "DescribeOrderableDBInstanceOptionsMessage$Engine": "

The name of the engine to retrieve DB instance options for.

Valid Values:

", + "DescribeOrderableDBInstanceOptionsMessage$Engine": "

The name of the engine to retrieve DB instance options for.

Valid Values:

", "DescribeOrderableDBInstanceOptionsMessage$EngineVersion": "

The engine version filter value. Specify this parameter to show only the available offerings matching the specified engine version.

", "DescribeOrderableDBInstanceOptionsMessage$DBInstanceClass": "

The DB instance class filter value. Specify this parameter to show only the available offerings matching the specified DB instance class.

", "DescribeOrderableDBInstanceOptionsMessage$LicenseModel": "

The license model filter value. Specify this parameter to show only the available offerings matching the specified license model.

RDS Custom supports only the BYOL licensing model.

", @@ -4518,7 +4523,7 @@ "ModifyDBClusterMessage$OptionGroupName": "

A value that indicates that the DB cluster should be associated with the specified option group.

DB clusters are associated with a default option group that can't be modified.

", "ModifyDBClusterMessage$PreferredBackupWindow": "

The daily time range during which automated backups are created if automated backups are enabled, using the BackupRetentionPeriod parameter.

The default is a 30-minute window selected at random from an 8-hour block of time for each Amazon Web Services Region. To view the time blocks available, see Backup window in the Amazon Aurora User Guide.

Constraints:

Valid for: Aurora DB clusters and Multi-AZ DB clusters

", "ModifyDBClusterMessage$PreferredMaintenanceWindow": "

The weekly time range during which system maintenance can occur, in Universal Coordinated Time (UTC).

Format: ddd:hh24:mi-ddd:hh24:mi

The default is a 30-minute window selected at random from an 8-hour block of time for each Amazon Web Services Region, occurring on a random day of the week. To see the time blocks available, see Adjusting the Preferred DB Cluster Maintenance Window in the Amazon Aurora User Guide.

Valid Days: Mon, Tue, Wed, Thu, Fri, Sat, Sun.

Constraints: Minimum 30-minute window.

Valid for: Aurora DB clusters and Multi-AZ DB clusters

", - "ModifyDBClusterMessage$EngineVersion": "

The version number of the database engine to which you want to upgrade. Changing this parameter results in an outage. The change is applied during the next maintenance window unless ApplyImmediately is enabled.

To list all of the available engine versions for MySQL 5.6-compatible Aurora, use the following command:

aws rds describe-db-engine-versions --engine aurora --query \"DBEngineVersions[].EngineVersion\"

To list all of the available engine versions for MySQL 5.7-compatible and MySQL 8.0-compatible Aurora, use the following command:

aws rds describe-db-engine-versions --engine aurora-mysql --query \"DBEngineVersions[].EngineVersion\"

To list all of the available engine versions for Aurora PostgreSQL, use the following command:

aws rds describe-db-engine-versions --engine aurora-postgresql --query \"DBEngineVersions[].EngineVersion\"

To list all of the available engine versions for RDS for MySQL, use the following command:

aws rds describe-db-engine-versions --engine mysql --query \"DBEngineVersions[].EngineVersion\"

To list all of the available engine versions for RDS for PostgreSQL, use the following command:

aws rds describe-db-engine-versions --engine postgres --query \"DBEngineVersions[].EngineVersion\"

Valid for: Aurora DB clusters and Multi-AZ DB clusters

", + "ModifyDBClusterMessage$EngineVersion": "

The version number of the database engine to which you want to upgrade. Changing this parameter results in an outage. The change is applied during the next maintenance window unless ApplyImmediately is enabled.

To list all of the available engine versions for Aurora MySQL version 2 (5.7-compatible) and version 3 (MySQL 8.0-compatible), use the following command:

aws rds describe-db-engine-versions --engine aurora-mysql --query \"DBEngineVersions[].EngineVersion\"

To list all of the available engine versions for Aurora PostgreSQL, use the following command:

aws rds describe-db-engine-versions --engine aurora-postgresql --query \"DBEngineVersions[].EngineVersion\"

To list all of the available engine versions for RDS for MySQL, use the following command:

aws rds describe-db-engine-versions --engine mysql --query \"DBEngineVersions[].EngineVersion\"

To list all of the available engine versions for RDS for PostgreSQL, use the following command:

aws rds describe-db-engine-versions --engine postgres --query \"DBEngineVersions[].EngineVersion\"

Valid for: Aurora DB clusters and Multi-AZ DB clusters

", "ModifyDBClusterMessage$DBInstanceParameterGroupName": "

The name of the DB parameter group to apply to all instances of the DB cluster.

When you apply a parameter group using the DBInstanceParameterGroupName parameter, the DB cluster isn't rebooted automatically. Also, parameter changes are applied immediately rather than during the next maintenance window.

Default: The existing name setting

Constraints:

Valid for: Aurora DB clusters only

", "ModifyDBClusterMessage$Domain": "

The Active Directory directory ID to move the DB cluster to. Specify none to remove the cluster from its current domain. The domain must be created prior to this operation.

For more information, see Kerberos Authentication in the Amazon Aurora User Guide.

Valid for: Aurora DB clusters only

", "ModifyDBClusterMessage$DomainIAMRoleName": "

Specify the name of the IAM role to be used when making API calls to the Directory Service.

Valid for: Aurora DB clusters only

", @@ -4690,8 +4695,8 @@ "RestoreDBClusterFromS3Message$DBClusterIdentifier": "

The name of the DB cluster to create from the source data in the Amazon S3 bucket. This parameter isn't case-sensitive.

Constraints:

Example: my-cluster1

", "RestoreDBClusterFromS3Message$DBClusterParameterGroupName": "

The name of the DB cluster parameter group to associate with the restored DB cluster. If this argument is omitted, default.aurora5.6 is used.

Constraints:

", "RestoreDBClusterFromS3Message$DBSubnetGroupName": "

A DB subnet group to associate with the restored DB cluster.

Constraints: If supplied, must match the name of an existing DBSubnetGroup.

Example: mydbsubnetgroup

", - "RestoreDBClusterFromS3Message$Engine": "

The name of the database engine to be used for this DB cluster.

Valid Values: aurora (for MySQL 5.6-compatible Aurora) and aurora-mysql (for MySQL 5.7-compatible and MySQL 8.0-compatible Aurora)

", - "RestoreDBClusterFromS3Message$EngineVersion": "

The version number of the database engine to use.

To list all of the available engine versions for aurora (for MySQL 5.6-compatible Aurora), use the following command:

aws rds describe-db-engine-versions --engine aurora --query \"DBEngineVersions[].EngineVersion\"

To list all of the available engine versions for aurora-mysql (for MySQL 5.7-compatible and MySQL 8.0-compatible Aurora), use the following command:

aws rds describe-db-engine-versions --engine aurora-mysql --query \"DBEngineVersions[].EngineVersion\"

Aurora MySQL

Example: 5.6.10a, 5.6.mysql_aurora.1.19.2, 5.7.mysql_aurora.2.07.1, 8.0.mysql_aurora.3.02.0

", + "RestoreDBClusterFromS3Message$Engine": "

The name of the database engine to be used for this DB cluster.

Valid Values: aurora-mysql (for MySQL 5.7-compatible and MySQL 8.0-compatible Aurora)

", + "RestoreDBClusterFromS3Message$EngineVersion": "

The version number of the database engine to use.

To list all of the available engine versions for aurora-mysql (MySQL 5.7-compatible and MySQL 8.0-compatible Aurora), use the following command:

aws rds describe-db-engine-versions --engine aurora-mysql --query \"DBEngineVersions[].EngineVersion\"

Aurora MySQL

Examples: 5.7.mysql_aurora.2.07.1, 8.0.mysql_aurora.3.02.0

", "RestoreDBClusterFromS3Message$MasterUsername": "

The name of the master user for the restored DB cluster.

Constraints:

", "RestoreDBClusterFromS3Message$MasterUserPassword": "

The password for the master database user. This password can contain any printable ASCII character except \"/\", \"\"\", or \"@\".

Constraints:

", "RestoreDBClusterFromS3Message$OptionGroupName": "

A value that indicates that the restored DB cluster should be associated with the specified option group.

Permanent options can't be removed from an option group. An option group can't be removed from a DB cluster once it is associated with a DB cluster.

", @@ -4710,7 +4715,7 @@ "RestoreDBClusterFromSnapshotMessage$DBClusterIdentifier": "

The name of the DB cluster to create from the DB snapshot or DB cluster snapshot. This parameter isn't case-sensitive.

Constraints:

Example: my-snapshot-id

Valid for: Aurora DB clusters and Multi-AZ DB clusters

", "RestoreDBClusterFromSnapshotMessage$SnapshotIdentifier": "

The identifier for the DB snapshot or DB cluster snapshot to restore from.

You can use either the name or the Amazon Resource Name (ARN) to specify a DB cluster snapshot. However, you can use only the ARN to specify a DB snapshot.

Constraints:

Valid for: Aurora DB clusters and Multi-AZ DB clusters

", "RestoreDBClusterFromSnapshotMessage$Engine": "

The database engine to use for the new DB cluster.

Default: The same as source

Constraint: Must be compatible with the engine of the source

Valid for: Aurora DB clusters and Multi-AZ DB clusters

", - "RestoreDBClusterFromSnapshotMessage$EngineVersion": "

The version of the database engine to use for the new DB cluster.

To list all of the available engine versions for MySQL 5.6-compatible Aurora, use the following command:

aws rds describe-db-engine-versions --engine aurora --query \"DBEngineVersions[].EngineVersion\"

To list all of the available engine versions for MySQL 5.7-compatible and MySQL 8.0-compatible Aurora, use the following command:

aws rds describe-db-engine-versions --engine aurora-mysql --query \"DBEngineVersions[].EngineVersion\"

To list all of the available engine versions for Aurora PostgreSQL, use the following command:

aws rds describe-db-engine-versions --engine aurora-postgresql --query \"DBEngineVersions[].EngineVersion\"

To list all of the available engine versions for RDS for MySQL, use the following command:

aws rds describe-db-engine-versions --engine mysql --query \"DBEngineVersions[].EngineVersion\"

To list all of the available engine versions for RDS for PostgreSQL, use the following command:

aws rds describe-db-engine-versions --engine postgres --query \"DBEngineVersions[].EngineVersion\"

Aurora MySQL

See MySQL on Amazon RDS Versions in the Amazon Aurora User Guide.

Aurora PostgreSQL

See Amazon Aurora PostgreSQL releases and engine versions in the Amazon Aurora User Guide.

MySQL

See MySQL on Amazon RDS Versions in the Amazon RDS User Guide.

PostgreSQL

See Amazon RDS for PostgreSQL versions and extensions in the Amazon RDS User Guide.

Valid for: Aurora DB clusters and Multi-AZ DB clusters

", + "RestoreDBClusterFromSnapshotMessage$EngineVersion": "

The version of the database engine to use for the new DB cluster. If you don't specify an engine version, the default version for the database engine in the Amazon Web Services Region is used.

To list all of the available engine versions for MySQL 5.7-compatible and MySQL 8.0-compatible Aurora, use the following command:

aws rds describe-db-engine-versions --engine aurora-mysql --query \"DBEngineVersions[].EngineVersion\"

To list all of the available engine versions for Aurora PostgreSQL, use the following command:

aws rds describe-db-engine-versions --engine aurora-postgresql --query \"DBEngineVersions[].EngineVersion\"

To list all of the available engine versions for RDS for MySQL, use the following command:

aws rds describe-db-engine-versions --engine mysql --query \"DBEngineVersions[].EngineVersion\"

To list all of the available engine versions for RDS for PostgreSQL, use the following command:

aws rds describe-db-engine-versions --engine postgres --query \"DBEngineVersions[].EngineVersion\"

Aurora MySQL

See Database engine updates for Amazon Aurora MySQL in the Amazon Aurora User Guide.

Aurora PostgreSQL

See Amazon Aurora PostgreSQL releases and engine versions in the Amazon Aurora User Guide.

MySQL

See Amazon RDS for MySQL in the Amazon RDS User Guide.

PostgreSQL

See Amazon RDS for PostgreSQL versions and extensions in the Amazon RDS User Guide.

Valid for: Aurora DB clusters and Multi-AZ DB clusters

", "RestoreDBClusterFromSnapshotMessage$DBSubnetGroupName": "

The name of the DB subnet group to use for the new DB cluster.

Constraints: If supplied, must match the name of an existing DB subnet group.

Example: mydbsubnetgroup

Valid for: Aurora DB clusters and Multi-AZ DB clusters

", "RestoreDBClusterFromSnapshotMessage$DatabaseName": "

The database name for the restored DB cluster.

Valid for: Aurora DB clusters and Multi-AZ DB clusters

", "RestoreDBClusterFromSnapshotMessage$OptionGroupName": "

The name of the option group to use for the restored DB cluster.

DB clusters are associated with a default option group that can't be modified.

", @@ -4823,7 +4828,7 @@ "StartExportTaskMessage$ExportTaskIdentifier": "

A unique identifier for the export task. This ID isn't an identifier for the Amazon S3 bucket where the data is to be exported.

", "StartExportTaskMessage$SourceArn": "

The Amazon Resource Name (ARN) of the snapshot or cluster to export to Amazon S3.

", "StartExportTaskMessage$S3BucketName": "

The name of the Amazon S3 bucket to export the snapshot or cluster data to.

", - "StartExportTaskMessage$IamRoleArn": "

The name of the IAM role to use for writing to the Amazon S3 bucket when exporting a snapshot or cluster.

", + "StartExportTaskMessage$IamRoleArn": "

The name of the IAM role to use for writing to the Amazon S3 bucket when exporting a snapshot or cluster.

In the IAM policy attached to your IAM role, include the following required actions to allow the transfer of files from Amazon RDS or Amazon Aurora to an S3 bucket:

In the policy, include the resources to identify the S3 bucket and objects in the bucket. The following list of resources shows the Amazon Resource Name (ARN) format for accessing S3:

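A minimal aws-sdk-go v1 sketch of StartExportTask; every name and ARN is a hypothetical placeholder, and the role must already carry the S3 permissions described above:

package main

import (
	"fmt"
	"log"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/rds"
)

func main() {
	svc := rds.New(session.Must(session.NewSession()))

	out, err := svc.StartExportTask(&rds.StartExportTaskInput{
		ExportTaskIdentifier: aws.String("my-snapshot-export"),
		SourceArn:            aws.String("arn:aws:rds:us-east-1:123456789012:snapshot:my-snapshot"),
		S3BucketName:         aws.String("my-export-bucket"),
		S3Prefix:             aws.String("exports"),
		IamRoleArn:           aws.String("arn:aws:iam::123456789012:role/rds-s3-export-role"),
		KmsKeyId:             aws.String("arn:aws:kms:us-east-1:123456789012:key/abcd-ef"),
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("export status:", aws.StringValue(out.Status))
}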
", "StartExportTaskMessage$KmsKeyId": "

The ID of the Amazon Web Services KMS key to use to encrypt the data exported to Amazon S3. The Amazon Web Services KMS key identifier is the key ARN, key ID, alias ARN, or alias name for the KMS key. The caller of this operation must be authorized to run the following operations. These can be set in the Amazon Web Services KMS key policy:

", "StartExportTaskMessage$S3Prefix": "

The Amazon S3 bucket prefix to use as the file name and path of the exported data.

", "StopActivityStreamRequest$ResourceArn": "

The Amazon Resource Name (ARN) of the DB cluster for the database activity stream. For example, arn:aws:rds:us-east-1:12345667890:cluster:das-cluster.

", @@ -4959,7 +4964,7 @@ } }, "SwitchoverDetail": { - "base": "

Contains the details about a blue/green deployment.

For more information, see Using Amazon RDS Blue/Green Deployments for database updates in the Amazon RDS User Guide and Using Amazon RDS Blue/Green Deployments for database updates in the Amazon Aurora User Guide.

", + "base": "

Contains the details about a blue/green deployment.

For more information, see Using Amazon RDS Blue/Green Deployments for database updates in the Amazon RDS User Guide and Using Amazon RDS Blue/Green Deployments for database updates in the Amazon Aurora User Guide.

", "refs": { "SwitchoverDetailList$member": null } @@ -4973,7 +4978,7 @@ "SwitchoverDetailStatus": { "base": null, "refs": { - "SwitchoverDetail$Status": "

The switchover status of a resource in a blue/green deployment.

Values:

" + "SwitchoverDetail$Status": "

The switchover status of a resource in a blue/green deployment.

Values:

" } }, "SwitchoverReadReplicaMessage": { diff --git a/models/apis/rds/2014-10-31/endpoint-tests-1.json b/models/apis/rds/2014-10-31/endpoint-tests-1.json index 4bcb5a33082..239943d8395 100644 --- a/models/apis/rds/2014-10-31/endpoint-tests-1.json +++ b/models/apis/rds/2014-10-31/endpoint-tests-1.json @@ -8,8 +8,8 @@ } }, "params": { - "Region": "af-south-1", "UseDualStack": false, + "Region": "af-south-1", "UseFIPS": false } }, @@ -21,8 +21,8 @@ } }, "params": { - "Region": "ap-east-1", "UseDualStack": false, + "Region": "ap-east-1", "UseFIPS": false } }, @@ -34,8 +34,8 @@ } }, "params": { - "Region": "ap-northeast-1", "UseDualStack": false, + "Region": "ap-northeast-1", "UseFIPS": false } }, @@ -47,8 +47,8 @@ } }, "params": { - "Region": "ap-northeast-2", "UseDualStack": false, + "Region": "ap-northeast-2", "UseFIPS": false } }, @@ -60,8 +60,8 @@ } }, "params": { - "Region": "ap-northeast-3", "UseDualStack": false, + "Region": "ap-northeast-3", "UseFIPS": false } }, @@ -73,8 +73,8 @@ } }, "params": { - "Region": "ap-south-1", "UseDualStack": false, + "Region": "ap-south-1", "UseFIPS": false } }, @@ -86,8 +86,8 @@ } }, "params": { - "Region": "ap-southeast-1", "UseDualStack": false, + "Region": "ap-southeast-1", "UseFIPS": false } }, @@ -99,8 +99,8 @@ } }, "params": { - "Region": "ap-southeast-2", "UseDualStack": false, + "Region": "ap-southeast-2", "UseFIPS": false } }, @@ -112,8 +112,8 @@ } }, "params": { - "Region": "ap-southeast-3", "UseDualStack": false, + "Region": "ap-southeast-3", "UseFIPS": false } }, @@ -125,8 +125,8 @@ } }, "params": { - "Region": "ca-central-1", "UseDualStack": false, + "Region": "ca-central-1", "UseFIPS": false } }, @@ -138,8 +138,8 @@ } }, "params": { - "Region": "ca-central-1", "UseDualStack": false, + "Region": "ca-central-1", "UseFIPS": true } }, @@ -151,8 +151,8 @@ } }, "params": { - "Region": "eu-central-1", "UseDualStack": false, + "Region": "eu-central-1", "UseFIPS": false } }, @@ -164,8 +164,8 @@ } }, "params": { - "Region": "eu-north-1", "UseDualStack": false, + "Region": "eu-north-1", "UseFIPS": false } }, @@ -177,8 +177,8 @@ } }, "params": { - "Region": "eu-south-1", "UseDualStack": false, + "Region": "eu-south-1", "UseFIPS": false } }, @@ -190,8 +190,8 @@ } }, "params": { - "Region": "eu-west-1", "UseDualStack": false, + "Region": "eu-west-1", "UseFIPS": false } }, @@ -203,8 +203,8 @@ } }, "params": { - "Region": "eu-west-2", "UseDualStack": false, + "Region": "eu-west-2", "UseFIPS": false } }, @@ -216,8 +216,8 @@ } }, "params": { - "Region": "eu-west-3", "UseDualStack": false, + "Region": "eu-west-3", "UseFIPS": false } }, @@ -229,8 +229,8 @@ } }, "params": { - "Region": "me-south-1", "UseDualStack": false, + "Region": "me-south-1", "UseFIPS": false } }, @@ -242,8 +242,8 @@ } }, "params": { - "Region": "sa-east-1", "UseDualStack": false, + "Region": "sa-east-1", "UseFIPS": false } }, @@ -255,8 +255,8 @@ } }, "params": { - "Region": "us-east-1", "UseDualStack": false, + "Region": "us-east-1", "UseFIPS": false } }, @@ -268,8 +268,8 @@ } }, "params": { - "Region": "us-east-1", "UseDualStack": false, + "Region": "us-east-1", "UseFIPS": true } }, @@ -281,8 +281,8 @@ } }, "params": { - "Region": "us-east-2", "UseDualStack": false, + "Region": "us-east-2", "UseFIPS": false } }, @@ -294,8 +294,8 @@ } }, "params": { - "Region": "us-east-2", "UseDualStack": false, + "Region": "us-east-2", "UseFIPS": true } }, @@ -307,8 +307,8 @@ } }, "params": { - "Region": "us-west-1", "UseDualStack": false, + "Region": "us-west-1", "UseFIPS": false } }, @@ -320,8 +320,8 
@@ } }, "params": { - "Region": "us-west-1", "UseDualStack": false, + "Region": "us-west-1", "UseFIPS": true } }, @@ -333,8 +333,8 @@ } }, "params": { - "Region": "us-west-2", "UseDualStack": false, + "Region": "us-west-2", "UseFIPS": false } }, @@ -346,8 +346,8 @@ } }, "params": { - "Region": "us-west-2", "UseDualStack": false, + "Region": "us-west-2", "UseFIPS": true } }, @@ -359,8 +359,8 @@ } }, "params": { - "Region": "us-east-1", "UseDualStack": true, + "Region": "us-east-1", "UseFIPS": true } }, @@ -372,8 +372,8 @@ } }, "params": { - "Region": "us-east-1", "UseDualStack": true, + "Region": "us-east-1", "UseFIPS": false } }, @@ -385,8 +385,8 @@ } }, "params": { - "Region": "cn-north-1", "UseDualStack": false, + "Region": "cn-north-1", "UseFIPS": false } }, @@ -398,8 +398,8 @@ } }, "params": { - "Region": "cn-northwest-1", "UseDualStack": false, + "Region": "cn-northwest-1", "UseFIPS": false } }, @@ -411,8 +411,8 @@ } }, "params": { - "Region": "cn-north-1", "UseDualStack": true, + "Region": "cn-north-1", "UseFIPS": true } }, @@ -424,8 +424,8 @@ } }, "params": { - "Region": "cn-north-1", "UseDualStack": false, + "Region": "cn-north-1", "UseFIPS": true } }, @@ -437,8 +437,8 @@ } }, "params": { - "Region": "cn-north-1", "UseDualStack": true, + "Region": "cn-north-1", "UseFIPS": false } }, @@ -450,8 +450,8 @@ } }, "params": { - "Region": "us-gov-east-1", "UseDualStack": false, + "Region": "us-gov-east-1", "UseFIPS": false } }, @@ -463,8 +463,8 @@ } }, "params": { - "Region": "us-gov-east-1", "UseDualStack": false, + "Region": "us-gov-east-1", "UseFIPS": true } }, @@ -476,8 +476,8 @@ } }, "params": { - "Region": "us-gov-west-1", "UseDualStack": false, + "Region": "us-gov-west-1", "UseFIPS": false } }, @@ -489,8 +489,8 @@ } }, "params": { - "Region": "us-gov-west-1", "UseDualStack": false, + "Region": "us-gov-west-1", "UseFIPS": true } }, @@ -502,8 +502,8 @@ } }, "params": { - "Region": "us-gov-east-1", "UseDualStack": true, + "Region": "us-gov-east-1", "UseFIPS": true } }, @@ -515,8 +515,8 @@ } }, "params": { - "Region": "us-gov-east-1", "UseDualStack": true, + "Region": "us-gov-east-1", "UseFIPS": false } }, @@ -528,8 +528,8 @@ } }, "params": { - "Region": "us-iso-east-1", "UseDualStack": false, + "Region": "us-iso-east-1", "UseFIPS": false } }, @@ -541,11 +541,22 @@ } }, "params": { - "Region": "us-iso-west-1", "UseDualStack": false, + "Region": "us-iso-west-1", "UseFIPS": false } }, + { + "documentation": "For region us-iso-east-1 with FIPS enabled and DualStack enabled", + "expect": { + "error": "FIPS and DualStack are enabled, but this partition does not support one or both" + }, + "params": { + "UseDualStack": true, + "Region": "us-iso-east-1", + "UseFIPS": true + } + }, { "documentation": "For region us-iso-east-1 with FIPS enabled and DualStack disabled", "expect": { @@ -554,11 +565,22 @@ } }, "params": { - "Region": "us-iso-east-1", "UseDualStack": false, + "Region": "us-iso-east-1", "UseFIPS": true } }, + { + "documentation": "For region us-iso-east-1 with FIPS disabled and DualStack enabled", + "expect": { + "error": "DualStack is enabled but this partition does not support DualStack" + }, + "params": { + "UseDualStack": true, + "Region": "us-iso-east-1", + "UseFIPS": false + } + }, { "documentation": "For region us-isob-east-1 with FIPS disabled and DualStack disabled", "expect": { @@ -567,11 +589,22 @@ } }, "params": { - "Region": "us-isob-east-1", "UseDualStack": false, + "Region": "us-isob-east-1", "UseFIPS": false } }, + { + "documentation": "For region us-isob-east-1 with 
FIPS enabled and DualStack enabled", + "expect": { + "error": "FIPS and DualStack are enabled, but this partition does not support one or both" + }, + "params": { + "UseDualStack": true, + "Region": "us-isob-east-1", + "UseFIPS": true + } + }, { "documentation": "For region us-isob-east-1 with FIPS enabled and DualStack disabled", "expect": { @@ -580,11 +613,22 @@ } }, "params": { - "Region": "us-isob-east-1", "UseDualStack": false, + "Region": "us-isob-east-1", "UseFIPS": true } }, + { + "documentation": "For region us-isob-east-1 with FIPS disabled and DualStack enabled", + "expect": { + "error": "DualStack is enabled but this partition does not support DualStack" + }, + "params": { + "UseDualStack": true, + "Region": "us-isob-east-1", + "UseFIPS": false + } + }, { "documentation": "For custom endpoint with region set and fips disabled and dualstack disabled", "expect": { @@ -593,8 +637,8 @@ } }, "params": { - "Region": "us-east-1", "UseDualStack": false, + "Region": "us-east-1", "UseFIPS": false, "Endpoint": "https://example.com" } @@ -618,8 +662,8 @@ "error": "Invalid Configuration: FIPS and custom endpoint are not supported" }, "params": { - "Region": "us-east-1", "UseDualStack": false, + "Region": "us-east-1", "UseFIPS": true, "Endpoint": "https://example.com" } @@ -630,11 +674,17 @@ "error": "Invalid Configuration: Dualstack and custom endpoint are not supported" }, "params": { - "Region": "us-east-1", "UseDualStack": true, + "Region": "us-east-1", "UseFIPS": false, "Endpoint": "https://example.com" } + }, + { + "documentation": "Missing region", + "expect": { + "error": "Invalid Configuration: Missing Region" + } } ], "version": "1.0" diff --git a/models/endpoints/endpoints.json b/models/endpoints/endpoints.json index a0a4d63371a..609333b8948 100644 --- a/models/endpoints/endpoints.json +++ b/models/endpoints/endpoints.json @@ -8301,6 +8301,17 @@ "us-west-2" : { } } }, + "ivsrealtime" : { + "endpoints" : { + "ap-northeast-1" : { }, + "ap-northeast-2" : { }, + "ap-south-1" : { }, + "eu-central-1" : { }, + "eu-west-1" : { }, + "us-east-1" : { }, + "us-west-2" : { } + } + }, "kafka" : { "endpoints" : { "af-south-1" : { }, diff --git a/service/cloudwatch/api.go b/service/cloudwatch/api.go index d2af5cbb2b8..67bed0e11ca 100644 --- a/service/cloudwatch/api.go +++ b/service/cloudwatch/api.go @@ -62,7 +62,10 @@ func (c *CloudWatch) DeleteAlarmsRequest(input *DeleteAlarmsInput) (req *request // you could delete 99 metric alarms and one composite alarm with one operation, // but you can't delete two composite alarms with one operation. // -// In the event of an error, no alarms are deleted. +// If you specify an incorrect alarm name or make any other error in the operation, +// no alarms are deleted. To confirm that alarms were deleted successfully, +// you can use the DescribeAlarms (https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_DescribeAlarms.html) +// operation after using DeleteAlarms.
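A short aws-sdk-go v1 sketch of the delete-then-confirm pattern this doc change describes; the alarm name is a placeholder:

package main

import (
	"fmt"
	"log"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/cloudwatch"
)

func main() {
	cw := cloudwatch.New(session.Must(session.NewSession()))
	names := []*string{aws.String("my-alarm")} // hypothetical alarm name

	if _, err := cw.DeleteAlarms(&cloudwatch.DeleteAlarmsInput{AlarmNames: names}); err != nil {
		log.Fatal(err)
	}

	// Confirm: a successful delete leaves nothing for DescribeAlarms to return.
	out, err := cw.DescribeAlarms(&cloudwatch.DescribeAlarmsInput{AlarmNames: names})
	if err != nil {
		log.Fatal(err)
	}
	if len(out.MetricAlarms) == 0 && len(out.CompositeAlarms) == 0 {
		fmt.Println("alarms deleted")
	}
}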
// // It is possible to create a loop or cycle of composite alarms, where composite // alarm A depends on composite alarm B, and composite alarm B also depends @@ -3394,10 +3397,10 @@ func (c *CloudWatch) PutMetricAlarmRequest(input *PutMetricAlarmInput) (req *req // If you are an IAM user, you must have Amazon EC2 permissions for some alarm // operations: // -// - The iam:CreateServiceLinkedRole for all alarms with EC2 actions +// - The iam:CreateServiceLinkedRole permission for all alarms with EC2 actions // -// - The iam:CreateServiceLinkedRole to create an alarm with Systems Manager -// OpsItem actions. +// - The iam:CreateServiceLinkedRole permissions to create an alarm with +// Systems Manager OpsItem or response plan actions. // // The first time you create an alarm in the Amazon Web Services Management // Console, the CLI, or by using the PutMetricAlarm API, CloudWatch creates @@ -4860,7 +4863,7 @@ func (s *Datapoint) SetUnit(v string) *Datapoint { type DeleteAlarmsInput struct { _ struct{} `type:"structure"` - // The alarms to be deleted. + // The alarms to be deleted. Do not enclose the alarm names in quote marks. // // AlarmNames is a required field AlarmNames []*string `type:"list" required:"true"` @@ -6110,13 +6113,15 @@ type Dimension struct { // The name of the dimension. Dimension names must contain only ASCII characters, // must include at least one non-whitespace character, and cannot start with - // a colon (:). + // a colon (:). ASCII control characters are not supported as part of dimension + // names. // // Name is a required field Name *string `min:"1" type:"string" required:"true"` // The value of the dimension. Dimension values must contain only ASCII characters - // and must include at least one non-whitespace character. + // and must include at least one non-whitespace character. ASCII control characters + // are not supported as part of dimension values. // // Value is a required field Value *string `min:"1" type:"string" required:"true"` @@ -9949,6 +9954,9 @@ func (s *MetricStreamEntry) SetState(v string) *MetricStreamEntry { // This structure contains the name of one of the metric namespaces that is // listed in a filter of a metric stream. +// +// The namespace can contain only ASCII printable characters (ASCII range 32 +// through 126). It must contain at least one non-whitespace character. type MetricStreamFilter struct { _ struct{} `type:"structure"` @@ -10997,16 +11005,39 @@ type PutMetricAlarmInput struct { // The actions to execute when this alarm transitions to the ALARM state from // any other state. Each action is specified as an Amazon Resource Name (ARN). 
+ // Valid values: + // + // EC2 actions: + // + // * arn:aws:automate:region:ec2:stop + // + // * arn:aws:automate:region:ec2:terminate + // + // * arn:aws:automate:region:ec2:reboot + // + // * arn:aws:automate:region:ec2:recover + // + // * arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Stop/1.0 + // + // * arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Terminate/1.0 + // + // * arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Reboot/1.0 + // + // * arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Recover/1.0 + // + // Autoscaling action: + // + // * arn:aws:autoscaling:region:account-id:scalingPolicy:policy-id:autoScalingGroupName/group-friendly-name:policyName/policy-friendly-name // - // Valid Values: arn:aws:automate:region:ec2:stop | arn:aws:automate:region:ec2:terminate - // | arn:aws:automate:region:ec2:recover | arn:aws:automate:region:ec2:reboot - // | arn:aws:sns:region:account-id:sns-topic-name | arn:aws:autoscaling:region:account-id:scalingPolicy:policy-id:autoScalingGroupName/group-friendly-name:policyName/policy-friendly-name - // | arn:aws:ssm:region:account-id:opsitem:severity | arn:aws:ssm-incidents::account-id:response-plan:response-plan-name + // SNS notification action: // - // Valid Values (for use with IAM roles): arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Stop/1.0 - // | arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Terminate/1.0 - // | arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Reboot/1.0 - // | arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Recover/1.0 + // * arn:aws:sns:region:account-id:sns-topic-name + // + // SSM integration actions: + // + // * arn:aws:ssm:region:account-id:opsitem:severity#CATEGORY=category-name + // + // * arn:aws:ssm-incidents::account-id:responseplan/response-plan-name AlarmActions []*string `type:"list"` // The description for the alarm. @@ -11014,6 +11045,9 @@ type PutMetricAlarmInput struct { // The name for the alarm. This name must be unique within the Region. // + // The name must contain only UTF-8 characters, and can't contain ASCII control + // characters. + // // AlarmName is a required field AlarmName *string `min:"1" type:"string" required:"true"` @@ -11065,15 +11099,39 @@ type PutMetricAlarmInput struct { // The actions to execute when this alarm transitions to the INSUFFICIENT_DATA // state from any other state. Each action is specified as an Amazon Resource - // Name (ARN). 
Valid values: + // + // EC2 actions: + // + // * arn:aws:automate:region:ec2:stop + // + // * arn:aws:automate:region:ec2:terminate + // + // * arn:aws:automate:region:ec2:reboot + // + // * arn:aws:automate:region:ec2:recover + // + // * arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Stop/1.0 // - // Valid Values: arn:aws:automate:region:ec2:stop | arn:aws:automate:region:ec2:terminate - // | arn:aws:automate:region:ec2:recover | arn:aws:automate:region:ec2:reboot - // | arn:aws:sns:region:account-id:sns-topic-name | arn:aws:autoscaling:region:account-id:scalingPolicy:policy-id:autoScalingGroupName/group-friendly-name:policyName/policy-friendly-name + // * arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Terminate/1.0 // - // Valid Values (for use with IAM roles): >arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Stop/1.0 - // | arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Terminate/1.0 - // | arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Reboot/1.0 + // * arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Reboot/1.0 + // + // * arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Recover/1.0 + // + // Autoscaling action: + // + // * arn:aws:autoscaling:region:account-id:scalingPolicy:policy-id:autoScalingGroupName/group-friendly-name:policyName/policy-friendly-name + // + // SNS notification action: + // + // * arn:aws:sns:region:account-id:sns-topic-name + // + // SSM integration actions: + // + // * arn:aws:ssm:region:account-id:opsitem:severity#CATEGORY=category-name + // + // * arn:aws:ssm-incidents::account-id:responseplan/response-plan-name InsufficientDataActions []*string `type:"list"` // The name for the metric associated with the alarm. For each PutMetricAlarm @@ -11106,16 +11164,40 @@ type PutMetricAlarmInput struct { Namespace *string `min:"1" type:"string"` // The actions to execute when this alarm transitions to an OK state from any - // other state. Each action is specified as an Amazon Resource Name (ARN). 
Valid + // values: + // + // EC2 actions: + // + // * arn:aws:automate:region:ec2:stop + // + // * arn:aws:automate:region:ec2:terminate + // + // * arn:aws:automate:region:ec2:reboot + // + // * arn:aws:automate:region:ec2:recover + // + // * arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Stop/1.0 + // + // * arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Terminate/1.0 + // + // * arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Reboot/1.0 + // + // * arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Recover/1.0 + // + // Autoscaling action: + // + // * arn:aws:autoscaling:region:account-id:scalingPolicy:policy-id:autoScalingGroupName/group-friendly-name:policyName/policy-friendly-name + // + // SNS notification action: + // + // * arn:aws:sns:region:account-id:sns-topic-name + // + // SSM integration actions: // - // Valid Values: arn:aws:automate:region:ec2:stop | arn:aws:automate:region:ec2:terminate - // | arn:aws:automate:region:ec2:recover | arn:aws:automate:region:ec2:reboot - // | arn:aws:sns:region:account-id:sns-topic-name | arn:aws:autoscaling:region:account-id:scalingPolicy:policy-id:autoScalingGroupName/group-friendly-name:policyName/policy-friendly-name + // * arn:aws:ssm:region:account-id:opsitem:severity#CATEGORY=category-name // - // Valid Values (for use with IAM roles): arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Stop/1.0 - // | arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Terminate/1.0 - // | arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Reboot/1.0 - // | arn:aws:swf:region:account-id:action/actions/AWS_EC2.InstanceId.Recover/1.0 + // * arn:aws:ssm-incidents::account-id:responseplan/response-plan-name OKActions []*string `type:"list"` // The length, in seconds, used each time the metric specified in MetricName @@ -11463,7 +11545,8 @@ type PutMetricDataInput struct { // MetricData is a required field MetricData []*MetricDatum `type:"list" required:"true"` - // The namespace for the metric data. + // The namespace for the metric data. You can use ASCII characters for the namespace, + // except for control characters, which are not supported. // // To avoid conflicts with Amazon Web Services service namespaces, you should // not specify a namespace that begins with AWS/ diff --git a/service/comprehend/api.go b/service/comprehend/api.go index 0a9d26c001b..3b28a56f444 100644 --- a/service/comprehend/api.go +++ b/service/comprehend/api.go @@ -1271,10 +1271,10 @@ func (c *Comprehend) CreateFlywheelRequest(input *CreateFlywheelInput) (req *req // CreateFlywheel API operation for Amazon Comprehend. // -// A flywheel is an AWS resource that orchestrates the ongoing training of a -// model for custom classification or custom entity recognition. You can create -// a flywheel to start with an existing trained model, or Comprehend can create -// and train a new model. +// A flywheel is an Amazon Web Services resource that orchestrates the ongoing +// training of a model for custom classification or custom entity recognition. +// You can create a flywheel to start with an existing trained model, or Comprehend +// can create and train a new model. // // When you create the flywheel, Comprehend creates a data lake in your account. 
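For the CreateFlywheel rewrite above, a hedged sketch of creating a flywheel from an existing trained model with this SDK. The field set matches CreateFlywheelInput as of this release; every name, ARN, and S3 URI below is a placeholder:

    package main

    import (
        "fmt"
        "log"

        "github.com/aws/aws-sdk-go/aws"
        "github.com/aws/aws-sdk-go/aws/session"
        "github.com/aws/aws-sdk-go/service/comprehend"
    )

    func main() {
        svc := comprehend.New(session.Must(session.NewSession()))

        out, err := svc.CreateFlywheel(&comprehend.CreateFlywheelInput{
            FlywheelName: aws.String("my-flywheel"),
            // Start from an existing trained model rather than having Comprehend train a new one.
            ActiveModelArn:    aws.String("arn:aws:comprehend:us-east-1:123456789012:document-classifier/my-classifier/version/v1"),
            DataAccessRoleArn: aws.String("arn:aws:iam::123456789012:role/ComprehendFlywheelRole"),
            // Comprehend provisions the data lake for training and test data under this prefix.
            DataLakeS3Uri: aws.String("s3://my-bucket/flywheel-data-lake/"),
        })
        if err != nil {
            log.Fatal(err)
        }
        fmt.Println("flywheel:", aws.StringValue(out.FlywheelArn))
    }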
// The data lake holds the training data and test data for all versions of the @@ -3999,13 +3999,14 @@ func (c *Comprehend) ImportModelRequest(input *ImportModelInput) (req *request.R // ImportModel API operation for Amazon Comprehend. // // Creates a new custom model that replicates a source custom model that you -// import. The source model can be in your AWS account or another one. +// import. The source model can be in your Amazon Web Services account or another +// one. // -// If the source model is in another AWS account, then it must have a resource-based -// policy that authorizes you to import it. +// If the source model is in another Amazon Web Services account, then it must +// have a resource-based policy that authorizes you to import it. // -// The source model must be in the same AWS region that you're using when you -// import. You can't import a model that's in a different region. +// The source model must be in the same Amazon Web Services Region that you're +// using when you import. You can't import a model that's in a different Region. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about @@ -4121,7 +4122,7 @@ func (c *Comprehend) ListDatasetsRequest(input *ListDatasetsInput) (req *request // ListDatasets API operation for Amazon Comprehend. // -// List the datasets that you have configured in this region. For more information +// List the datasets that you have configured in this Region. For more information // about datasets, see Flywheel overview (https://docs.aws.amazon.com/comprehend/latest/dg/flywheels-about.html) // in the Amazon Comprehend Developer Guide. // @@ -6678,8 +6679,8 @@ func (c *Comprehend) PutResourcePolicyRequest(input *PutResourcePolicyInput) (re // PutResourcePolicy API operation for Amazon Comprehend. // // Attaches a resource-based policy to a custom model. You can use this policy -// to authorize an entity in another AWS account to import the custom model, -// which replicates it in Amazon Comprehend in their account. +// to authorize an entity in another Amazon Web Services account to import the +// custom model, which replicates it in Amazon Comprehend in their account. // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about @@ -9535,9 +9536,6 @@ type BatchDetectSentimentInput struct { // can contain a maximum of 25 documents. The maximum size of each document // is 5 KB. // - // Amazon Comprehend performs real-time sentiment analysis on the first 500 - // characters of the input text and ignores any additional text in the input. - // // TextList is a sensitive parameter and its value will be // replaced with "sensitive" in string returned by BatchDetectSentimentInput's // String and GoString methods. @@ -11092,8 +11090,8 @@ type CreateDocumentClassifierInput struct { // token, Amazon Comprehend generates one. ClientRequestToken *string `min:"1" type:"string" idempotencyToken:"true"` - // The Amazon Resource Name (ARN) of the AWS Identity and Access Management - // (IAM) role that grants Amazon Comprehend read access to your input data. + // The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend + // read access to your input data. 
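The ImportModel hunks above spell out the cross-account rules: the source model must be in the same Region, and a source model in another account needs a resource-based policy that authorizes the import. A sketch of the call itself, assuming such a policy is already in place; the ARNs and names are placeholders:

    package main

    import (
        "fmt"
        "log"

        "github.com/aws/aws-sdk-go/aws"
        "github.com/aws/aws-sdk-go/aws/session"
        "github.com/aws/aws-sdk-go/service/comprehend"
    )

    func main() {
        svc := comprehend.New(session.Must(session.NewSession()))

        // The source model must be in the same Region you call from; if it lives in
        // another Amazon Web Services account, its resource-based policy must allow this import.
        out, err := svc.ImportModel(&comprehend.ImportModelInput{
            SourceModelArn: aws.String("arn:aws:comprehend:us-east-1:111122223333:document-classifier/shared-classifier/version/v1"),
            ModelName:      aws.String("imported-classifier"),
            VersionName:    aws.String("v1"),
        })
        if err != nil {
            log.Fatal(err)
        }
        fmt.Println("imported model:", aws.StringValue(out.ModelArn))
    }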
// // DataAccessRoleArn is a required field DataAccessRoleArn *string `min:"20" type:"string" required:"true"` @@ -11122,9 +11120,8 @@ type CreateDocumentClassifierInput struct { // is a pipe (|). Mode *string `type:"string" enum:"DocumentClassifierMode"` - // ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses - // to encrypt trained custom models. The ModelKmsKeyId can be either of the - // following formats: + // ID for the KMS key that Amazon Comprehend uses to encrypt trained custom + // models. The ModelKmsKeyId can be either of the following formats: // // * KMS Key ID: "1234abcd-12ab-34cd-56ef-1234567890ab" // @@ -11132,8 +11129,8 @@ type CreateDocumentClassifierInput struct { ModelKmsKeyId *string `type:"string"` // The resource-based policy to attach to your custom document classifier model. - // You can use this policy to allow another AWS account to import your custom - // model. + // You can use this policy to allow another Amazon Web Services account to import + // your custom model. // // Provide your policy as a JSON body that you enter as a UTF-8 encoded string // without line breaks. To provide valid JSON, enclose the attribute names and @@ -11161,13 +11158,14 @@ type CreateDocumentClassifierInput struct { // The version name given to the newly created classifier. Version names can // have a maximum of 256 characters. Alphanumeric characters, hyphens (-) and // underscores (_) are allowed. The version name must be unique among all models - // with the same classifier name in the account/AWS Region. + // with the same classifier name in the Amazon Web Services account/Amazon Web + // Services Region. VersionName *string `type:"string"` - // ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses - // to encrypt data on the storage volume attached to the ML compute instance(s) - // that process the analysis job. The VolumeKmsKeyId can be either of the following - // formats: + // ID for the Amazon Web Services Key Management Service (KMS) key that Amazon + // Comprehend uses to encrypt data on the storage volume attached to the ML + // compute instance(s) that process the analysis job. The VolumeKmsKeyId can + // be either of the following formats: // // * KMS Key ID: "1234abcd-12ab-34cd-56ef-1234567890ab" // @@ -11365,9 +11363,9 @@ type CreateEndpointInput struct { // endpoint creation request, Amazon Comprehend will not return a ResourceInUseException. ClientRequestToken *string `min:"1" type:"string" idempotencyToken:"true"` - // The Amazon Resource Name (ARN) of the AWS Identity and Access Management - // (IAM) role that grants Amazon Comprehend read access to trained custom models - // encrypted with a customer managed key (ModelKmsKeyId). + // The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend + // read access to trained custom models encrypted with a customer managed key + // (ModelKmsKeyId). DataAccessRoleArn *string `min:"20" type:"string"` // The desired number of inference units to be used by the model using this @@ -11539,14 +11537,14 @@ type CreateEntityRecognizerInput struct { // token, Amazon Comprehend generates one. ClientRequestToken *string `min:"1" type:"string" idempotencyToken:"true"` - // The Amazon Resource Name (ARN) of the AWS Identity and Access Management - // (IAM) role that grants Amazon Comprehend read access to your input data. + // The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend + // read access to your input data. 
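The ModelPolicy and PutResourcePolicy hunks describe authorizing another account to import a model by attaching a resource-based policy, supplied as single-line UTF-8 JSON with double-quoted attribute names and values. A sketch under those rules; the ARNs are placeholders, and the statement shown (allowing comprehend:ImportModel to another account) is an assumption about a typical import-authorization policy, not text from this diff:

    package main

    import (
        "log"

        "github.com/aws/aws-sdk-go/aws"
        "github.com/aws/aws-sdk-go/aws/session"
        "github.com/aws/aws-sdk-go/service/comprehend"
    )

    func main() {
        svc := comprehend.New(session.Must(session.NewSession()))

        // JSON body without line breaks, as the doc requires; the principal and
        // action here are hypothetical.
        policy := `{"Version":"2012-10-17","Statement":[{"Effect":"Allow","Principal":{"AWS":"arn:aws:iam::111122223333:root"},"Action":"comprehend:ImportModel","Resource":"*"}]}`

        _, err := svc.PutResourcePolicy(&comprehend.PutResourcePolicyInput{
            ResourceArn:    aws.String("arn:aws:comprehend:us-east-1:123456789012:document-classifier/my-classifier/version/v1"),
            ResourcePolicy: aws.String(policy),
        })
        if err != nil {
            log.Fatal(err)
        }
    }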
// // DataAccessRoleArn is a required field DataAccessRoleArn *string `min:"20" type:"string" required:"true"` // Specifies the format and location of the input data. The S3 bucket containing - // the input data must be located in the same region as the entity recognizer + // the input data must be located in the same Region as the entity recognizer // being created. // // InputDataConfig is a required field @@ -11561,9 +11559,8 @@ type CreateEntityRecognizerInput struct { // LanguageCode is a required field LanguageCode *string `type:"string" required:"true" enum:"LanguageCode"` - // ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses - // to encrypt trained custom models. The ModelKmsKeyId can be either of the - // following formats: + // ID for the KMS key that Amazon Comprehend uses to encrypt trained custom + // models. The ModelKmsKeyId can be either of the following formats: // // * KMS Key ID: "1234abcd-12ab-34cd-56ef-1234567890ab" // @@ -11571,8 +11568,8 @@ type CreateEntityRecognizerInput struct { ModelKmsKeyId *string `type:"string"` // The JSON resource-based policy to attach to your custom entity recognizer - // model. You can use this policy to allow another AWS account to import your - // custom model. + // model. You can use this policy to allow another Amazon Web Services account + // to import your custom model. // // Provide your JSON as a UTF-8 encoded string without line breaks. To provide // valid JSON for your policy, enclose the attribute names and values in double @@ -11589,7 +11586,7 @@ type CreateEntityRecognizerInput struct { // The name given to the newly created recognizer. Recognizer names can be a // maximum of 256 characters. Alphanumeric characters, hyphens (-) and underscores - // (_) are allowed. The name must be unique in the account/region. + // (_) are allowed. The name must be unique in the account/Region. // // RecognizerName is a required field RecognizerName *string `type:"string" required:"true"` @@ -11603,13 +11600,13 @@ type CreateEntityRecognizerInput struct { // The version name given to the newly created recognizer. Version names can // be a maximum of 256 characters. Alphanumeric characters, hyphens (-) and // underscores (_) are allowed. The version name must be unique among all models - // with the same recognizer name in the account/ AWS Region. + // with the same recognizer name in the account/Region. VersionName *string `type:"string"` - // ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses - // to encrypt data on the storage volume attached to the ML compute instance(s) - // that process the analysis job. The VolumeKmsKeyId can be either of the following - // formats: + // ID for the Amazon Web Services Key Management Service (KMS) key that Amazon + // Comprehend uses to encrypt data on the storage volume attached to the ML + // compute instance(s) that process the analysis job. The VolumeKmsKeyId can + // be either of the following formats: // // * KMS Key ID: "1234abcd-12ab-34cd-56ef-1234567890ab" // @@ -11799,9 +11796,8 @@ type CreateFlywheelInput struct { // token, Amazon Comprehend generates one. ClientRequestToken *string `min:"1" type:"string" idempotencyToken:"true"` - // The Amazon Resource Name (ARN) of the AWS Identity and Access Management - // (IAM) role that grants Amazon Comprehend the permissions required to access - // the flywheel data in the data lake. 
+ // The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend + // the permissions required to access the flywheel data in the data lake. // // DataAccessRoleArn is a required field DataAccessRoleArn *string `min:"20" type:"string" required:"true"` @@ -11996,9 +11992,8 @@ type DataSecurityConfig struct { // data lake. DataLakeKmsKeyId *string `type:"string"` - // ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses - // to encrypt trained custom models. The ModelKmsKeyId can be either of the - // following formats: + // ID for the KMS key that Amazon Comprehend uses to encrypt trained custom + // models. The ModelKmsKeyId can be either of the following formats: // // * KMS Key ID: "1234abcd-12ab-34cd-56ef-1234567890ab" // @@ -12193,7 +12188,7 @@ type DatasetDocumentClassifierInputDataConfig struct { // will be combined to make a single unique label, such as LABELLABELLABEL. LabelDelimiter *string `min:"1" type:"string"` - // The Amazon S3 URI for the input data. The S3 bucket must be in the same region + // The Amazon S3 URI for the input data. The S3 bucket must be in the same Region // as the API endpoint that you are calling. The URI can point to a single input // file or it can provide the prefix for a collection of input files. // @@ -12258,7 +12253,7 @@ type DatasetEntityRecognizerAnnotations struct { _ struct{} `type:"structure"` // Specifies the Amazon S3 location where the training documents for an entity - // recognizer are located. The URI must be in the same region as the API endpoint + // recognizer are located. The URI must be in the same Region as the API endpoint // that you are calling. // // S3Uri is a required field @@ -14489,10 +14484,13 @@ func (s *DetectDominantLanguageInput) SetText(v string) *DetectDominantLanguageI type DetectDominantLanguageOutput struct { _ struct{} `type:"structure" sensitive:"true"` - // The languages that Amazon Comprehend detected in the input text. For each - // language, the response returns the RFC 5646 language code and the level of - // confidence that Amazon Comprehend has in the accuracy of its inference. For - // more information about RFC 5646, see Tags for Identifying Languages (https://tools.ietf.org/html/rfc5646) + // Array of languages that Amazon Comprehend detected in the input text. The + // array is sorted in descending order of the score (the dominant language is + // always the first element in the array). + // + // For each language, the response returns the RFC 5646 language code and the + // level of confidence that Amazon Comprehend has in the accuracy of its inference. + // For more information about RFC 5646, see Tags for Identifying Languages (https://tools.ietf.org/html/rfc5646) // on the IETF Tools web site. Languages []*DominantLanguage `type:"list"` } @@ -14943,9 +14941,6 @@ type DetectSentimentInput struct { // A UTF-8 text string. The maximum string size is 5 KB. // - // Amazon Comprehend performs real-time sentiment analysis on the first 500 - // characters of the input text and ignores any additional text in the input. - // // Text is a sensitive parameter and its value will be // replaced with "sensitive" in string returned by DetectSentimentInput's // String and GoString methods. 
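The DetectDominantLanguage doc change above now states that the Languages array is sorted in descending order of score, so the dominant language is always element 0. A small sketch that relies on that ordering; the sample text is arbitrary:

    package main

    import (
        "fmt"
        "log"

        "github.com/aws/aws-sdk-go/aws"
        "github.com/aws/aws-sdk-go/aws/session"
        "github.com/aws/aws-sdk-go/service/comprehend"
    )

    func main() {
        svc := comprehend.New(session.Must(session.NewSession()))

        out, err := svc.DetectDominantLanguage(&comprehend.DetectDominantLanguageInput{
            Text: aws.String("Il pleut aujourd'hui à Paris."),
        })
        if err != nil {
            log.Fatal(err)
        }
        if len(out.Languages) > 0 {
            top := out.Languages[0] // highest-confidence language comes first
            fmt.Printf("dominant: %s (score %.2f)\n",
                aws.StringValue(top.LanguageCode), aws.Float64Value(top.Score))
        }
    }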
@@ -15438,8 +15433,8 @@ func (s *DocumentClassificationJobFilter) SetSubmitTimeBefore(v time.Time) *Docu type DocumentClassificationJobProperties struct { _ struct{} `type:"structure"` - // The Amazon Resource Name (ARN) of the AWS Identity and Access Management - // (IAM) role that grants Amazon Comprehend read access to your input data. + // The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend + // read access to your input data. DataAccessRoleArn *string `min:"20" type:"string"` // The Amazon Resource Name (ARN) that identifies the document classifier. @@ -15456,8 +15451,9 @@ type DocumentClassificationJobProperties struct { InputDataConfig *InputDataConfig `type:"structure"` // The Amazon Resource Name (ARN) of the document classification job. It is - // a unique, fully qualified identifier for the job. It includes the AWS account, - // Region, and the job ID. The format of the ARN is as follows: + // a unique, fully qualified identifier for the job. It includes the Amazon + // Web Services account, Amazon Web Services Region, and the job ID. The format + // of the ARN is as follows: // // arn::comprehend:::document-classification-job/ // @@ -15486,10 +15482,10 @@ type DocumentClassificationJobProperties struct { // The time that the document classification job was submitted for processing. SubmitTime *time.Time `type:"timestamp"` - // ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses - // to encrypt data on the storage volume attached to the ML compute instance(s) - // that process the analysis job. The VolumeKmsKeyId can be either of the following - // formats: + // ID for the Amazon Web Services Key Management Service (KMS) key that Amazon + // Comprehend uses to encrypt data on the storage volume attached to the ML + // compute instance(s) that process the analysis job. The VolumeKmsKeyId can + // be either of the following formats: // // * KMS Key ID: "1234abcd-12ab-34cd-56ef-1234567890ab" // @@ -15708,7 +15704,7 @@ type DocumentClassifierInputDataConfig struct { // will be combined to make a single unique label, such as LABELLABELLABEL. LabelDelimiter *string `min:"1" type:"string"` - // The Amazon S3 URI for the input data. The S3 bucket must be in the same region + // The Amazon S3 URI for the input data. The S3 bucket must be in the same Region // as the API endpoint that you are calling. The URI can point to a single input // file or it can provide the prefix for a collection of input files. // @@ -15720,8 +15716,8 @@ type DocumentClassifierInputDataConfig struct { S3Uri *string `type:"string"` // This specifies the Amazon S3 location where the test annotations for an entity - // recognizer are located. The URI must be in the same AWS Region as the API - // endpoint that you are calling. + // recognizer are located. The URI must be in the same Amazon Web Services Region + // as the API endpoint that you are calling. TestS3Uri *string `type:"string"` } @@ -15803,9 +15799,9 @@ type DocumentClassifierOutputDataConfig struct { // The Amazon S3 prefix for the data lake location of the flywheel statistics. FlywheelStatsS3Prefix *string `type:"string"` - // ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses - // to encrypt the output results from an analysis job. The KmsKeyId can be one - // of the following formats: + // ID for the Amazon Web Services Key Management Service (KMS) key that Amazon + // Comprehend uses to encrypt the output results from an analysis job. 
The KmsKeyId + // can be one of the following formats: // // * KMS Key ID: "1234abcd-12ab-34cd-56ef-1234567890ab" // @@ -15818,7 +15814,7 @@ type DocumentClassifierOutputDataConfig struct { // When you use the OutputDataConfig object while creating a custom classifier, // you specify the Amazon S3 location where you want to write the confusion - // matrix. The URI must be in the same region as the API endpoint that you are + // matrix. The URI must be in the same Region as the API endpoint that you are // calling. The location is used as the prefix for the actual location of this // output file. // @@ -15878,8 +15874,8 @@ type DocumentClassifierProperties struct { // String and GoString methods. ClassifierMetadata *ClassifierMetadata `type:"structure" sensitive:"true"` - // The Amazon Resource Name (ARN) of the AWS Identity and Access Management - // (IAM) role that grants Amazon Comprehend read access to your input data. + // The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend + // read access to your input data. DataAccessRoleArn *string `min:"20" type:"string"` // The Amazon Resource Name (ARN) that identifies the document classifier. @@ -15908,9 +15904,8 @@ type DocumentClassifierProperties struct { // once the classifier is trained. Mode *string `type:"string" enum:"DocumentClassifierMode"` - // ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses - // to encrypt trained custom models. The ModelKmsKeyId can be either of the - // following formats: + // ID for the KMS key that Amazon Comprehend uses to encrypt trained custom + // models. The ModelKmsKeyId can be either of the following formats: // // * KMS Key ID: "1234abcd-12ab-34cd-56ef-1234567890ab" // @@ -15921,8 +15916,8 @@ type DocumentClassifierProperties struct { OutputDataConfig *DocumentClassifierOutputDataConfig `type:"structure"` // The Amazon Resource Name (ARN) of the source model. This model was imported - // from a different AWS account to create the document classifier model in your - // AWS account. + // from a different Amazon Web Services account to create the document classifier + // model in your Amazon Web Services account. SourceModelArn *string `type:"string"` // The status of the document classifier. If the status is TRAINED the classifier @@ -15945,10 +15940,10 @@ type DocumentClassifierProperties struct { // The version name that you assigned to the document classifier. VersionName *string `type:"string"` - // ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses - // to encrypt data on the storage volume attached to the ML compute instance(s) - // that process the analysis job. The VolumeKmsKeyId can be either of the following - // formats: + // ID for the Amazon Web Services Key Management Service (KMS) key that Amazon + // Comprehend uses to encrypt data on the storage volume attached to the ML + // compute instance(s) that process the analysis job. The VolumeKmsKeyId can + // be either of the following formats: // // * KMS Key ID: "1234abcd-12ab-34cd-56ef-1234567890ab" // @@ -16530,8 +16525,8 @@ func (s *DominantLanguageDetectionJobFilter) SetSubmitTimeBefore(v time.Time) *D type DominantLanguageDetectionJobProperties struct { _ struct{} `type:"structure"` - // The Amazon Resource Name (ARN) of the AWS Identity and Access Management - // (IAM) role that grants Amazon Comprehend read access to your input data. + // The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend + // read access to your input data. 
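Several hunks above touch DocumentClassifierProperties, whose Status field signals when a classifier is usable (TRAINED). A sketch that reads the status back, assuming a hypothetical classifier ARN:

    package main

    import (
        "fmt"
        "log"

        "github.com/aws/aws-sdk-go/aws"
        "github.com/aws/aws-sdk-go/aws/session"
        "github.com/aws/aws-sdk-go/service/comprehend"
    )

    func main() {
        svc := comprehend.New(session.Must(session.NewSession()))

        out, err := svc.DescribeDocumentClassifier(&comprehend.DescribeDocumentClassifierInput{
            DocumentClassifierArn: aws.String("arn:aws:comprehend:us-east-1:123456789012:document-classifier/my-classifier/version/v1"),
        })
        if err != nil {
            log.Fatal(err)
        }
        props := out.DocumentClassifierProperties
        if aws.StringValue(props.Status) == comprehend.ModelStatusTrained {
            fmt.Println("classifier ready")
        } else {
            // Message carries the status detail, e.g. why training is still running or failed.
            fmt.Println("status:", aws.StringValue(props.Status), aws.StringValue(props.Message))
        }
    }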
DataAccessRoleArn *string `min:"20" type:"string"` // The time that the dominant language detection job completed. @@ -16542,8 +16537,9 @@ type DominantLanguageDetectionJobProperties struct { InputDataConfig *InputDataConfig `type:"structure"` // The Amazon Resource Name (ARN) of the dominant language detection job. It - // is a unique, fully qualified identifier for the job. It includes the AWS - // account, Region, and the job ID. The format of the ARN is as follows: + // is a unique, fully qualified identifier for the job. It includes the Amazon + // Web Services account, Amazon Web Services Region, and the job ID. The format + // of the ARN is as follows: // // arn::comprehend:::dominant-language-detection-job/ // @@ -16572,10 +16568,10 @@ type DominantLanguageDetectionJobProperties struct { // The time that the dominant language detection job was submitted for processing. SubmitTime *time.Time `type:"timestamp"` - // ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses - // to encrypt data on the storage volume attached to the ML compute instance(s) - // that process the analysis job. The VolumeKmsKeyId can be either of the following - // formats: + // ID for the Amazon Web Services Key Management Service (KMS) key that Amazon + // Comprehend uses to encrypt data on the storage volume attached to the ML + // compute instance(s) that process the analysis job. The VolumeKmsKeyId can + // be either of the following formats: // // * KMS Key ID: "1234abcd-12ab-34cd-56ef-1234567890ab" // @@ -16751,9 +16747,9 @@ type EndpointProperties struct { // The number of inference units currently used by the model using this endpoint. CurrentInferenceUnits *int64 `min:"1" type:"integer"` - // The Amazon Resource Name (ARN) of the AWS Identity and Access Management - // (IAM) role that grants Amazon Comprehend read access to trained custom models - // encrypted with a customer managed key (ModelKmsKeyId). + // The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend + // read access to trained custom models encrypted with a customer managed key + // (ModelKmsKeyId). DataAccessRoleArn *string `min:"20" type:"string"` // Data access role ARN to use in case the new model is encrypted with a customer @@ -16962,8 +16958,8 @@ func (s *EntitiesDetectionJobFilter) SetSubmitTimeBefore(v time.Time) *EntitiesD type EntitiesDetectionJobProperties struct { _ struct{} `type:"structure"` - // The Amazon Resource Name (ARN) of the AWS Identity and Access Management - // (IAM) role that grants Amazon Comprehend read access to your input data. + // The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend + // read access to your input data. DataAccessRoleArn *string `min:"20" type:"string"` // The time that the entities detection job completed @@ -16972,13 +16968,17 @@ type EntitiesDetectionJobProperties struct { // The Amazon Resource Name (ARN) that identifies the entity recognizer. EntityRecognizerArn *string `type:"string"` + // The Amazon Resource Name (ARN) of the flywheel associated with this job. + FlywheelArn *string `type:"string"` + // The input data configuration that you supplied when you created the entities // detection job. InputDataConfig *InputDataConfig `type:"structure"` // The Amazon Resource Name (ARN) of the entities detection job. It is a unique, - // fully qualified identifier for the job. It includes the AWS account, Region, - // and the job ID. The format of the ARN is as follows: + // fully qualified identifier for the job. 
It includes the Amazon Web Services + // account, Amazon Web Services Region, and the job ID. The format of the ARN + // is as follows: // // arn::comprehend:::entities-detection-job/ // @@ -17010,10 +17010,10 @@ type EntitiesDetectionJobProperties struct { // The time that the entities detection job was submitted for processing. SubmitTime *time.Time `type:"timestamp"` - // ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses - // to encrypt data on the storage volume attached to the ML compute instance(s) - // that process the analysis job. The VolumeKmsKeyId can be either of the following - // formats: + // ID for the Amazon Web Services Key Management Service (KMS) key that Amazon + // Comprehend uses to encrypt data on the storage volume attached to the ML + // compute instance(s) that process the analysis job. The VolumeKmsKeyId can + // be either of the following formats: // // * KMS Key ID: "1234abcd-12ab-34cd-56ef-1234567890ab" // @@ -17062,6 +17062,12 @@ func (s *EntitiesDetectionJobProperties) SetEntityRecognizerArn(v string) *Entit return s } +// SetFlywheelArn sets the FlywheelArn field's value. +func (s *EntitiesDetectionJobProperties) SetFlywheelArn(v string) *EntitiesDetectionJobProperties { + s.FlywheelArn = &v + return s +} + // SetInputDataConfig sets the InputDataConfig field's value. func (s *EntitiesDetectionJobProperties) SetInputDataConfig(v *InputDataConfig) *EntitiesDetectionJobProperties { s.InputDataConfig = v @@ -17322,14 +17328,14 @@ type EntityRecognizerAnnotations struct { _ struct{} `type:"structure"` // Specifies the Amazon S3 location where the annotations for an entity recognizer - // are located. The URI must be in the same region as the API endpoint that + // are located. The URI must be in the same Region as the API endpoint that // you are calling. // // S3Uri is a required field S3Uri *string `type:"string" required:"true"` // Specifies the Amazon S3 location where the test annotations for an entity - // recognizer are located. The URI must be in the same region as the API endpoint + // recognizer are located. The URI must be in the same Region as the API endpoint // that you are calling. TestS3Uri *string `type:"string"` } @@ -17390,15 +17396,15 @@ type EntityRecognizerDocuments struct { InputFormat *string `type:"string" enum:"InputFormat"` // Specifies the Amazon S3 location where the training documents for an entity - // recognizer are located. The URI must be in the same region as the API endpoint + // recognizer are located. The URI must be in the same Region as the API endpoint // that you are calling. // // S3Uri is a required field S3Uri *string `type:"string" required:"true"` // Specifies the Amazon S3 location where the test documents for an entity recognizer - // are located. The URI must be in the same AWS Region as the API endpoint that - // you are calling. + // are located. The URI must be in the same Amazon Web Services Region as the + // API endpoint that you are calling. TestS3Uri *string `type:"string"` } @@ -17456,7 +17462,7 @@ type EntityRecognizerEntityList struct { _ struct{} `type:"structure"` // Specifies the Amazon S3 location where the entity list is located. The URI - // must be in the same region as the API endpoint that you are calling. + // must be in the same Region as the API endpoint that you are calling. 
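The EntitiesDetectionJobProperties hunks above add the FlywheelArn field along with its SetFlywheelArn setter. A sketch that surfaces the new field when describing a job; the job ID is a placeholder, and the field stays nil whenever the service omits it (for example, for jobs not started by a flywheel):

    package main

    import (
        "fmt"
        "log"

        "github.com/aws/aws-sdk-go/aws"
        "github.com/aws/aws-sdk-go/aws/session"
        "github.com/aws/aws-sdk-go/service/comprehend"
    )

    func main() {
        svc := comprehend.New(session.Must(session.NewSession()))

        out, err := svc.DescribeEntitiesDetectionJob(&comprehend.DescribeEntitiesDetectionJobInput{
            JobId: aws.String("0123456789abcdef0123456789abcdef"),
        })
        if err != nil {
            log.Fatal(err)
        }
        props := out.EntitiesDetectionJobProperties
        fmt.Println("status:", aws.StringValue(props.JobStatus))
        if props.FlywheelArn != nil { // field added in this release
            fmt.Println("flywheel:", aws.StringValue(props.FlywheelArn))
        }
    }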
// // S3Uri is a required field S3Uri *string `type:"string" required:"true"` @@ -17931,8 +17937,8 @@ func (s *EntityRecognizerOutputDataConfig) SetFlywheelStatsS3Prefix(v string) *E type EntityRecognizerProperties struct { _ struct{} `type:"structure"` - // The Amazon Resource Name (ARN) of the AWS Identity and Access Management - // (IAM) role that grants Amazon Comprehend read access to your input data. + // The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend + // read access to your input data. DataAccessRoleArn *string `min:"20" type:"string"` // The time that the recognizer creation completed. @@ -17954,9 +17960,8 @@ type EntityRecognizerProperties struct { // A description of the status of the recognizer. Message *string `type:"string"` - // ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses - // to encrypt trained custom models. The ModelKmsKeyId can be either of the - // following formats: + // ID for the KMS key that Amazon Comprehend uses to encrypt trained custom + // models. The ModelKmsKeyId can be either of the following formats: // // * KMS Key ID: "1234abcd-12ab-34cd-56ef-1234567890ab" // @@ -17974,8 +17979,8 @@ type EntityRecognizerProperties struct { RecognizerMetadata *EntityRecognizerMetadata `type:"structure" sensitive:"true"` // The Amazon Resource Name (ARN) of the source model. This model was imported - // from a different AWS account to create the entity recognizer model in your - // AWS account. + // from a different Amazon Web Services account to create the entity recognizer + // model in your Amazon Web Services account. SourceModelArn *string `type:"string"` // Provides the status of the entity recognizer. @@ -17993,10 +17998,10 @@ type EntityRecognizerProperties struct { // The version name you assigned to the entity recognizer. VersionName *string `type:"string"` - // ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses - // to encrypt data on the storage volume attached to the ML compute instance(s) - // that process the analysis job. The VolumeKmsKeyId can be either of the following - // formats: + // ID for the Amazon Web Services Key Management Service (KMS) key that Amazon + // Comprehend uses to encrypt data on the storage volume attached to the ML + // compute instance(s) that process the analysis job. The VolumeKmsKeyId can + // be either of the following formats: // // * KMS Key ID: "1234abcd-12ab-34cd-56ef-1234567890ab" // @@ -18463,8 +18468,8 @@ func (s *EventsDetectionJobFilter) SetSubmitTimeBefore(v time.Time) *EventsDetec type EventsDetectionJobProperties struct { _ struct{} `type:"structure"` - // The Amazon Resource Name (ARN) of the AWS Identity and Access Management - // (IAM) role that grants Amazon Comprehend read access to your input data. + // The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend + // read access to your input data. DataAccessRoleArn *string `min:"20" type:"string"` // The time that the events detection job completed. @@ -18475,8 +18480,9 @@ type EventsDetectionJobProperties struct { InputDataConfig *InputDataConfig `type:"structure"` // The Amazon Resource Name (ARN) of the events detection job. It is a unique, - // fully qualified identifier for the job. It includes the AWS account, Region, - // and the job ID. The format of the ARN is as follows: + // fully qualified identifier for the job. It includes the Amazon Web Services + // account, Amazon Web Services Region, and the job ID. 
The format of the ARN + // is as follows: // // arn::comprehend:::events-detection-job/ // @@ -18923,9 +18929,8 @@ type FlywheelProperties struct { // Creation time of the flywheel. CreationTime *time.Time `type:"timestamp"` - // The Amazon Resource Name (ARN) of the AWS Identity and Access Management - // (IAM) role that grants Amazon Comprehend permission to access the flywheel - // data. + // The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend + // permission to access the flywheel data. DataAccessRoleArn *string `min:"20" type:"string"` // Amazon S3 URI of the data lake location. @@ -19198,14 +19203,13 @@ func (s *Geometry) SetPolygon(v []*Point) *Geometry { type ImportModelInput struct { _ struct{} `type:"structure"` - // The Amazon Resource Name (ARN) of the AWS Identity and Access Management - // (IAM) role that grants Amazon Comprehend permission to use Amazon Key Management - // Service (KMS) to encrypt or decrypt the custom model. + // The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend + // permission to use Amazon Key Management Service (KMS) to encrypt or decrypt + // the custom model. DataAccessRoleArn *string `min:"20" type:"string"` - // ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses - // to encrypt trained custom models. The ModelKmsKeyId can be either of the - // following formats: + // ID for the KMS key that Amazon Comprehend uses to encrypt trained custom + // models. The ModelKmsKeyId can be either of the following formats: // // * KMS Key ID: "1234abcd-12ab-34cd-56ef-1234567890ab" // @@ -19230,7 +19234,7 @@ type ImportModelInput struct { // The version name given to the custom model that is created by this import. // Version names can have a maximum of 256 characters. Alphanumeric characters, // hyphens (-) and underscores (_) are allowed. The version name must be unique - // among all models with the same classifier name in the account/AWS Region. + // among all models with the same classifier name in the account/Region. VersionName *string `type:"string"` } @@ -19365,7 +19369,7 @@ type InputDataConfig struct { // text messages. InputFormat *string `type:"string" enum:"InputFormat"` - // The Amazon S3 URI for the input data. The URI must be in same region as the + // The Amazon S3 URI for the input data. The URI must be in same Region as the // API endpoint that you are calling. The URI can point to a single input file // or it can provide the prefix for a collection of data files. // @@ -19892,8 +19896,8 @@ func (s *KeyPhrasesDetectionJobFilter) SetSubmitTimeBefore(v time.Time) *KeyPhra type KeyPhrasesDetectionJobProperties struct { _ struct{} `type:"structure"` - // The Amazon Resource Name (ARN) of the AWS Identity and Access Management - // (IAM) role that grants Amazon Comprehend read access to your input data. + // The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend + // read access to your input data. DataAccessRoleArn *string `min:"20" type:"string"` // The time that the key phrases detection job completed. @@ -19904,8 +19908,9 @@ type KeyPhrasesDetectionJobProperties struct { InputDataConfig *InputDataConfig `type:"structure"` // The Amazon Resource Name (ARN) of the key phrases detection job. It is a - // unique, fully qualified identifier for the job. It includes the AWS account, - // Region, and the job ID. The format of the ARN is as follows: + // unique, fully qualified identifier for the job. 
It includes the Amazon Web + // Services account, Amazon Web Services Region, and the job ID. The format + // of the ARN is as follows: // // arn::comprehend:::key-phrases-detection-job/ // @@ -19937,10 +19942,9 @@ type KeyPhrasesDetectionJobProperties struct { // The time that the key phrases detection job was submitted for processing. SubmitTime *time.Time `type:"timestamp"` - // ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses - // to encrypt data on the storage volume attached to the ML compute instance(s) - // that process the analysis job. The VolumeKmsKeyId can be either of the following - // formats: + // ID for the KMS key that Amazon Comprehend uses to encrypt data on the storage + // volume attached to the ML compute instance(s) that process the analysis job. + // The VolumeKmsKeyId can be either of the following formats: // // * KMS Key ID: "1234abcd-12ab-34cd-56ef-1234567890ab" // @@ -22115,9 +22119,9 @@ func (s *MentionSentiment) SetSentimentScore(v *SentimentScore) *MentionSentimen type OutputDataConfig struct { _ struct{} `type:"structure"` - // ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses - // to encrypt the output results from an analysis job. The KmsKeyId can be one - // of the following formats: + // ID for the Amazon Web Services Key Management Service (KMS) key that Amazon + // Comprehend uses to encrypt the output results from an analysis job. The KmsKeyId + // can be one of the following formats: // // * KMS Key ID: "1234abcd-12ab-34cd-56ef-1234567890ab" // @@ -22130,7 +22134,7 @@ type OutputDataConfig struct { // When you use the OutputDataConfig object with asynchronous operations, you // specify the Amazon S3 location where you want to write the output data. The - // URI must be in the same region as the API endpoint that you are calling. + // URI must be in the same Region as the API endpoint that you are calling. // The location is used as the prefix for the actual location of the output // file. // @@ -22317,8 +22321,8 @@ func (s *PiiEntitiesDetectionJobFilter) SetSubmitTimeBefore(v time.Time) *PiiEnt type PiiEntitiesDetectionJobProperties struct { _ struct{} `type:"structure"` - // The Amazon Resource Name (ARN) of the AWS Identity and Access Management - // (IAM) role that grants Amazon Comprehend read access to your input data. + // The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend + // read access to your input data. DataAccessRoleArn *string `min:"20" type:"string"` // The time that the PII entities detection job completed. @@ -22328,8 +22332,9 @@ type PiiEntitiesDetectionJobProperties struct { InputDataConfig *InputDataConfig `type:"structure"` // The Amazon Resource Name (ARN) of the PII entities detection job. It is a - // unique, fully qualified identifier for the job. It includes the AWS account, - // Region, and the job ID. The format of the ARN is as follows: + // unique, fully qualified identifier for the job. It includes the Amazon Web + // Services account, Amazon Web Services Region, and the job ID. The format + // of the ARN is as follows: // // arn::comprehend:::pii-entities-detection-job/ // @@ -22536,8 +22541,8 @@ func (s *PiiEntity) SetType(v string) *PiiEntity { type PiiOutputDataConfig struct { _ struct{} `type:"structure"` - // ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses - // to encrypt the output results from an analysis job. 
+ // ID for the Amazon Web Services Key Management Service (KMS) key that Amazon + // Comprehend uses to encrypt the output results from an analysis job. KmsKeyId *string `type:"string"` // When you use the PiiOutputDataConfig object with asynchronous operations, @@ -23190,8 +23195,8 @@ func (s *SentimentDetectionJobFilter) SetSubmitTimeBefore(v time.Time) *Sentimen type SentimentDetectionJobProperties struct { _ struct{} `type:"structure"` - // The Amazon Resource Name (ARN) of the AWS Identity and Access Management - // (IAM) role that grants Amazon Comprehend read access to your input data. + // The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend + // read access to your input data. DataAccessRoleArn *string `min:"20" type:"string"` // The time that the sentiment detection job ended. @@ -23202,8 +23207,9 @@ type SentimentDetectionJobProperties struct { InputDataConfig *InputDataConfig `type:"structure"` // The Amazon Resource Name (ARN) of the sentiment detection job. It is a unique, - // fully qualified identifier for the job. It includes the AWS account, Region, - // and the job ID. The format of the ARN is as follows: + // fully qualified identifier for the job. It includes the Amazon Web Services + // account, Amazon Web Services Region, and the job ID. The format of the ARN + // is as follows: // // arn::comprehend:::sentiment-detection-job/ // @@ -23235,10 +23241,10 @@ type SentimentDetectionJobProperties struct { // The time that the sentiment detection job was submitted for processing. SubmitTime *time.Time `type:"timestamp"` - // ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses - // to encrypt data on the storage volume attached to the ML compute instance(s) - // that process the analysis job. The VolumeKmsKeyId can be either of the following - // formats: + // ID for the Amazon Web Services Key Management Service (KMS) key that Amazon + // Comprehend uses to encrypt data on the storage volume attached to the ML + // compute instance(s) that process the analysis job. The VolumeKmsKeyId can + // be either of the following formats: // // * KMS Key ID: "1234abcd-12ab-34cd-56ef-1234567890ab" // @@ -23418,8 +23424,8 @@ type StartDocumentClassificationJobInput struct { // token, Amazon Comprehend generates one. ClientRequestToken *string `min:"1" type:"string" idempotencyToken:"true"` - // The Amazon Resource Name (ARN) of the AWS Identity and Access Management - // (IAM) role that grants Amazon Comprehend read access to your input data. + // The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend + // read access to your input data. // // DataAccessRoleArn is a required field DataAccessRoleArn *string `min:"20" type:"string" required:"true"` @@ -23451,10 +23457,10 @@ type StartDocumentClassificationJobInput struct { // use by the sales department. Tags []*Tag `type:"list"` - // ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses - // to encrypt data on the storage volume attached to the ML compute instance(s) - // that process the analysis job. The VolumeKmsKeyId can be either of the following - // formats: + // ID for the Amazon Web Services Key Management Service (KMS) key that Amazon + // Comprehend uses to encrypt data on the storage volume attached to the ML + // compute instance(s) that process the analysis job. 
The VolumeKmsKeyId can + // be either of the following formats: // // * KMS Key ID: "1234abcd-12ab-34cd-56ef-1234567890ab" // @@ -23605,8 +23611,9 @@ type StartDocumentClassificationJobOutput struct { DocumentClassifierArn *string `type:"string"` // The Amazon Resource Name (ARN) of the document classification job. It is - // a unique, fully qualified identifier for the job. It includes the AWS account, - // Region, and the job ID. The format of the ARN is as follows: + // a unique, fully qualified identifier for the job. It includes the Amazon + // Web Services account, Amazon Web Services Region, and the job ID. The format + // of the ARN is as follows: // // arn::comprehend:::document-classification-job/ // @@ -23686,10 +23693,9 @@ type StartDominantLanguageDetectionJobInput struct { // token, Amazon Comprehend generates one. ClientRequestToken *string `min:"1" type:"string" idempotencyToken:"true"` - // The Amazon Resource Name (ARN) of the AWS Identity and Access Management - // (IAM) role that grants Amazon Comprehend read access to your input data. - // For more information, see https://docs.aws.amazon.com/comprehend/latest/dg/access-control-managing-permissions.html#auth-role-permissions - // (https://docs.aws.amazon.com/comprehend/latest/dg/access-control-managing-permissions.html#auth-role-permissions). + // The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend + // read access to your input data. For more information, see Role-based permissions + // (https://docs.aws.amazon.com/comprehend/latest/dg/security_iam_id-based-policy-examples.html#auth-role-permissions). // // DataAccessRoleArn is a required field DataAccessRoleArn *string `min:"20" type:"string" required:"true"` @@ -23713,10 +23719,10 @@ type StartDominantLanguageDetectionJobInput struct { // use by the sales department. Tags []*Tag `type:"list"` - // ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses - // to encrypt data on the storage volume attached to the ML compute instance(s) - // that process the analysis job. The VolumeKmsKeyId can be either of the following - // formats: + // ID for the Amazon Web Services Key Management Service (KMS) key that Amazon + // Comprehend uses to encrypt data on the storage volume attached to the ML + // compute instance(s) that process the analysis job. The VolumeKmsKeyId can + // be either of the following formats: // // * KMS Key ID: "1234abcd-12ab-34cd-56ef-1234567890ab" // @@ -23852,8 +23858,9 @@ type StartDominantLanguageDetectionJobOutput struct { _ struct{} `type:"structure"` // The Amazon Resource Name (ARN) of the dominant language detection job. It - // is a unique, fully qualified identifier for the job. It includes the AWS - // account, Region, and the job ID. The format of the ARN is as follows: + // is a unique, fully qualified identifier for the job. It includes the Amazon + // Web Services account, Amazon Web Services Region, and the job ID. The format + // of the ARN is as follows: // // arn::comprehend:::dominant-language-detection-job/ // @@ -23921,10 +23928,9 @@ type StartEntitiesDetectionJobInput struct { // token, Amazon Comprehend generates one. ClientRequestToken *string `min:"1" type:"string" idempotencyToken:"true"` - // The Amazon Resource Name (ARN) of the AWS Identity and Access Management - // (IAM) role that grants Amazon Comprehend read access to your input data. 
- // For more information, see https://docs.aws.amazon.com/comprehend/latest/dg/access-control-managing-permissions.html#auth-role-permissions - // (https://docs.aws.amazon.com/comprehend/latest/dg/access-control-managing-permissions.html#auth-role-permissions). + // The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend + // read access to your input data. For more information, see Role-based permissions + // (https://docs.aws.amazon.com/comprehend/latest/dg/security_iam_id-based-policy-examples.html#auth-role-permissions). // // DataAccessRoleArn is a required field DataAccessRoleArn *string `min:"20" type:"string" required:"true"` @@ -23965,10 +23971,10 @@ type StartEntitiesDetectionJobInput struct { // use by the sales department. Tags []*Tag `type:"list"` - // ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses - // to encrypt data on the storage volume attached to the ML compute instance(s) - // that process the analysis job. The VolumeKmsKeyId can be either of the following - // formats: + // ID for the Amazon Web Services Key Management Service (KMS) key that Amazon + // Comprehend uses to encrypt data on the storage volume attached to the ML + // compute instance(s) that process the analysis job. The VolumeKmsKeyId can + // be either of the following formats: // // * KMS Key ID: "1234abcd-12ab-34cd-56ef-1234567890ab" // @@ -24128,8 +24134,9 @@ type StartEntitiesDetectionJobOutput struct { EntityRecognizerArn *string `type:"string"` // The Amazon Resource Name (ARN) of the entities detection job. It is a unique, - // fully qualified identifier for the job. It includes the AWS account, Region, - // and the job ID. The format of the ARN is as follows: + // fully qualified identifier for the job. It includes the Amazon Web Services + // account, Amazon Web Services Region, and the job ID. The format of the ARN + // is as follows: // // arn::comprehend:::entities-detection-job/ // @@ -24208,8 +24215,8 @@ type StartEventsDetectionJobInput struct { // token, Amazon Comprehend generates one. ClientRequestToken *string `min:"1" type:"string" idempotencyToken:"true"` - // The Amazon Resource Name (ARN) of the AWS Identity and Access Management - // (IAM) role that grants Amazon Comprehend read access to your input data. + // The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend + // read access to your input data. // // DataAccessRoleArn is a required field DataAccessRoleArn *string `min:"20" type:"string" required:"true"` @@ -24371,8 +24378,9 @@ type StartEventsDetectionJobOutput struct { _ struct{} `type:"structure"` // The Amazon Resource Name (ARN) of the events detection job. It is a unique, - // fully qualified identifier for the job. It includes the AWS account, Region, - // and the job ID. The format of the ARN is as follows: + // fully qualified identifier for the job. It includes the Amazon Web Services + // account, Amazon Web Services Region, and the job ID. The format of the ARN + // is as follows: // // arn::comprehend:::events-detection-job/ // @@ -24529,10 +24537,9 @@ type StartKeyPhrasesDetectionJobInput struct { // token, Amazon Comprehend generates one. ClientRequestToken *string `min:"1" type:"string" idempotencyToken:"true"` - // The Amazon Resource Name (ARN) of the AWS Identity and Access Management - // (IAM) role that grants Amazon Comprehend read access to your input data. 
-	// For more information, see https://docs.aws.amazon.com/comprehend/latest/dg/access-control-managing-permissions.html#auth-role-permissions
-	// (https://docs.aws.amazon.com/comprehend/latest/dg/access-control-managing-permissions.html#auth-role-permissions).
+	// The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend
+	// read access to your input data. For more information, see Role-based permissions
+	// (https://docs.aws.amazon.com/comprehend/latest/dg/security_iam_id-based-policy-examples.html#auth-role-permissions).
 	//
 	// DataAccessRoleArn is a required field
 	DataAccessRoleArn *string `min:"20" type:"string" required:"true"`
@@ -24562,10 +24569,10 @@ type StartKeyPhrasesDetectionJobInput struct {
 	// use by the sales department.
 	Tags []*Tag `type:"list"`
 
-	// ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses
-	// to encrypt data on the storage volume attached to the ML compute instance(s)
-	// that process the analysis job. The VolumeKmsKeyId can be either of the following
-	// formats:
+	// ID for the Amazon Web Services Key Management Service (KMS) key that Amazon
+	// Comprehend uses to encrypt data on the storage volume attached to the ML
+	// compute instance(s) that process the analysis job. The VolumeKmsKeyId can
+	// be either of the following formats:
 	//
 	//    * KMS Key ID: "1234abcd-12ab-34cd-56ef-1234567890ab"
 	//
 	//
@@ -24710,8 +24717,9 @@ type StartKeyPhrasesDetectionJobOutput struct {
 	_ struct{} `type:"structure"`
 
 	// The Amazon Resource Name (ARN) of the key phrase detection job. It is a unique,
-	// fully qualified identifier for the job. It includes the AWS account, Region,
-	// and the job ID. The format of the ARN is as follows:
+	// fully qualified identifier for the job. It includes the Amazon Web Services
+	// account, Amazon Web Services Region, and the job ID. The format of the ARN
+	// is as follows:
 	//
 	// arn:<partition>:comprehend:<region>:<account-id>:key-phrases-detection-job/<job-id>
 	//
@@ -24779,8 +24787,8 @@ type StartPiiEntitiesDetectionJobInput struct {
 	// token, Amazon Comprehend generates one.
 	ClientRequestToken *string `min:"1" type:"string" idempotencyToken:"true"`
 
-	// The Amazon Resource Name (ARN) of the AWS Identity and Access Management
-	// (IAM) role that grants Amazon Comprehend read access to your input data.
+	// The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend
+	// read access to your input data.
 	//
 	// DataAccessRoleArn is a required field
 	DataAccessRoleArn *string `min:"20" type:"string" required:"true"`
@@ -24960,8 +24968,9 @@ type StartPiiEntitiesDetectionJobOutput struct {
 	_ struct{} `type:"structure"`
 
 	// The Amazon Resource Name (ARN) of the PII entity detection job. It is a unique,
-	// fully qualified identifier for the job. It includes the AWS account, Region,
-	// and the job ID. The format of the ARN is as follows:
+	// fully qualified identifier for the job. It includes the Amazon Web Services
+	// account, Amazon Web Services Region, and the job ID. The format of the ARN
+	// is as follows:
 	//
 	// arn:<partition>:comprehend:<region>:<account-id>:pii-entities-detection-job/<job-id>
 	//
@@ -25020,10 +25029,9 @@ type StartSentimentDetectionJobInput struct {
 	// token, Amazon Comprehend generates one.
 	ClientRequestToken *string `min:"1" type:"string" idempotencyToken:"true"`
 
-	// The Amazon Resource Name (ARN) of the AWS Identity and Access Management
-	// (IAM) role that grants Amazon Comprehend read access to your input data.
-	// For more information, see https://docs.aws.amazon.com/comprehend/latest/dg/access-control-managing-permissions.html#auth-role-permissions
-	// (https://docs.aws.amazon.com/comprehend/latest/dg/access-control-managing-permissions.html#auth-role-permissions).
+	// The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend
+	// read access to your input data. For more information, see Role-based permissions
+	// (https://docs.aws.amazon.com/comprehend/latest/dg/security_iam_id-based-policy-examples.html#auth-role-permissions).
 	//
 	// DataAccessRoleArn is a required field
 	DataAccessRoleArn *string `min:"20" type:"string" required:"true"`
@@ -25053,10 +25061,10 @@ type StartSentimentDetectionJobInput struct {
 	// use by the sales department.
 	Tags []*Tag `type:"list"`
 
-	// ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses
-	// to encrypt data on the storage volume attached to the ML compute instance(s)
-	// that process the analysis job. The VolumeKmsKeyId can be either of the following
-	// formats:
+	// ID for the Amazon Web Services Key Management Service (KMS) key that Amazon
+	// Comprehend uses to encrypt data on the storage volume attached to the ML
+	// compute instance(s) that process the analysis job. The VolumeKmsKeyId can
+	// be either of the following formats:
 	//
 	//    * KMS Key ID: "1234abcd-12ab-34cd-56ef-1234567890ab"
 	//
 	//
@@ -25201,8 +25209,9 @@ type StartSentimentDetectionJobOutput struct {
 	_ struct{} `type:"structure"`
 
 	// The Amazon Resource Name (ARN) of the sentiment detection job. It is a unique,
-	// fully qualified identifier for the job. It includes the AWS account, Region,
-	// and the job ID. The format of the ARN is as follows:
+	// fully qualified identifier for the job. It includes the Amazon Web Services
+	// account, Amazon Web Services Region, and the job ID. The format of the ARN
+	// is as follows:
 	//
 	// arn:<partition>:comprehend:<region>:<account-id>:sentiment-detection-job/<job-id>
 	//
@@ -25270,9 +25279,9 @@ type StartTargetedSentimentDetectionJobInput struct {
 	// token, Amazon Comprehend generates one.
 	ClientRequestToken *string `min:"1" type:"string" idempotencyToken:"true"`
 
-	// The Amazon Resource Name (ARN) of the AWS Identity and Access Management
-	// (IAM) role that grants Amazon Comprehend read access to your input data.
-	// For more information, see Role-based permissions (https://docs.aws.amazon.com/comprehend/latest/dg/access-control-managing-permissions.html#auth-role-permissions).
+	// The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend
+	// read access to your input data. For more information, see Role-based permissions
+	// (https://docs.aws.amazon.com/comprehend/latest/dg/access-control-managing-permissions.html#auth-role-permissions).
 	//
 	// DataAccessRoleArn is a required field
 	DataAccessRoleArn *string `min:"20" type:"string" required:"true"`
@@ -25450,8 +25459,9 @@ type StartTargetedSentimentDetectionJobOutput struct {
 	_ struct{} `type:"structure"`
 
 	// The Amazon Resource Name (ARN) of the targeted sentiment detection job. It
-	// is a unique, fully qualified identifier for the job. It includes the AWS
-	// account, Region, and the job ID. The format of the ARN is as follows:
+	// is a unique, fully qualified identifier for the job. It includes the Amazon
+	// Web Services account, Amazon Web Services Region, and the job ID. The format
+	// of the ARN is as follows:
 	//
 	// arn:<partition>:comprehend:<region>:<account-id>:targeted-sentiment-detection-job/<job-id>
 	//
@@ -25520,10 +25530,9 @@ type StartTopicsDetectionJobInput struct {
 	// token, Amazon Comprehend generates one.
 	ClientRequestToken *string `min:"1" type:"string" idempotencyToken:"true"`
 
-	// The Amazon Resource Name (ARN) of the AWS Identity and Access Management
-	// (IAM) role that grants Amazon Comprehend read access to your input data.
-	// For more information, see https://docs.aws.amazon.com/comprehend/latest/dg/access-control-managing-permissions.html#auth-role-permissions
-	// (https://docs.aws.amazon.com/comprehend/latest/dg/access-control-managing-permissions.html#auth-role-permissions).
+	// The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend
+	// read access to your input data. For more information, see Role-based permissions
+	// (https://docs.aws.amazon.com/comprehend/latest/dg/security_iam_id-based-policy-examples.html#auth-role-permissions).
 	//
 	// DataAccessRoleArn is a required field
 	DataAccessRoleArn *string `min:"20" type:"string" required:"true"`
@@ -25552,10 +25561,10 @@ type StartTopicsDetectionJobInput struct {
 	// use by the sales department.
 	Tags []*Tag `type:"list"`
 
-	// ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses
-	// to encrypt data on the storage volume attached to the ML compute instance(s)
-	// that process the analysis job. The VolumeKmsKeyId can be either of the following
-	// formats:
+	// ID for the Amazon Web Services Key Management Service (KMS) key that Amazon
+	// Comprehend uses to encrypt data on the storage volume attached to the ML
+	// compute instance(s) that process the analysis job. The VolumeKmsKeyId can
+	// be either of the following formats:
 	//
 	//    * KMS Key ID: "1234abcd-12ab-34cd-56ef-1234567890ab"
 	//
 	//
@@ -25700,8 +25709,9 @@ type StartTopicsDetectionJobOutput struct {
 	_ struct{} `type:"structure"`
 
 	// The Amazon Resource Name (ARN) of the topics detection job. It is a unique,
-	// fully qualified identifier for the job. It includes the AWS account, Region,
-	// and the job ID. The format of the ARN is as follows:
+	// fully qualified identifier for the job. It includes the Amazon Web Services
+	// account, Amazon Web Services Region, and the job ID. The format of the ARN
+	// is as follows:
 	//
 	// arn:<partition>:comprehend:<region>:<account-id>:topics-detection-job/<job-id>
 	//
@@ -26847,8 +26857,8 @@ func (s *TargetedSentimentDetectionJobFilter) SetSubmitTimeBefore(v time.Time) *
 type TargetedSentimentDetectionJobProperties struct {
 	_ struct{} `type:"structure"`
 
-	// The Amazon Resource Name (ARN) of the AWS Identity and Access Management
-	// (IAM) role that grants Amazon Comprehend read access to your input data.
+	// The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend
+	// read access to your input data.
 	DataAccessRoleArn *string `min:"20" type:"string"`
 
 	// The time that the targeted sentiment detection job ended.
@@ -26859,8 +26869,9 @@ type TargetedSentimentDetectionJobProperties struct {
 	InputDataConfig *InputDataConfig `type:"structure"`
 
 	// The Amazon Resource Name (ARN) of the targeted sentiment detection job. It
-	// is a unique, fully qualified identifier for the job. It includes the AWS
-	// account, Region, and the job ID. The format of the ARN is as follows:
+	// is a unique, fully qualified identifier for the job. It includes the Amazon
+	// Web Services account, Amazon Web Services Region, and the job ID. The format
+	// of the ARN is as follows:
 	//
 	// arn:<partition>:comprehend:<region>:<account-id>:targeted-sentiment-detection-job/<job-id>
 	//
@@ -27556,8 +27567,8 @@ func (s *TopicsDetectionJobFilter) SetSubmitTimeBefore(v time.Time) *TopicsDetec
 type TopicsDetectionJobProperties struct {
 	_ struct{} `type:"structure"`
 
-	// The Amazon Resource Name (ARN) of the AWS Identity and Access Management
-	// (IAM) role that grants Amazon Comprehend read access to your job data.
+	// The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend
+	// read access to your job data.
 	DataAccessRoleArn *string `min:"20" type:"string"`
 
 	// The time that the topic detection job was completed.
@@ -27568,8 +27579,9 @@ type TopicsDetectionJobProperties struct {
 	InputDataConfig *InputDataConfig `type:"structure"`
 
 	// The Amazon Resource Name (ARN) of the topics detection job. It is a unique,
-	// fully qualified identifier for the job. It includes the AWS account, Region,
-	// and the job ID. The format of the ARN is as follows:
+	// fully qualified identifier for the job. It includes the Amazon Web Services
+	// account, Amazon Web Services Region, and the job ID. The format of the ARN
+	// is as follows:
 	//
 	// arn:<partition>:comprehend:<region>:<account-id>:topics-detection-job/<job-id>
 	//
@@ -27602,10 +27614,10 @@ type TopicsDetectionJobProperties struct {
 	// The time that the topic detection job was submitted for processing.
 	SubmitTime *time.Time `type:"timestamp"`
 
-	// ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses
-	// to encrypt data on the storage volume attached to the ML compute instance(s)
-	// that process the analysis job. The VolumeKmsKeyId can be either of the following
-	// formats:
+	// ID for the Amazon Web Services Key Management Service (KMS) key that Amazon
+	// Comprehend uses to encrypt data on the storage volume attached to the ML
+	// compute instance(s) that process the analysis job. The VolumeKmsKeyId can
+	// be either of the following formats:
 	//
 	//    * KMS Key ID: "1234abcd-12ab-34cd-56ef-1234567890ab"
 	//
 	//
@@ -27872,9 +27884,8 @@ func (s UntagResourceOutput) GoString() string {
 type UpdateDataSecurityConfig struct {
 	_ struct{} `type:"structure"`
 
-	// ID for the AWS Key Management Service (KMS) key that Amazon Comprehend uses
-	// to encrypt trained custom models. The ModelKmsKeyId can be either of the
-	// following formats:
+	// ID for the KMS key that Amazon Comprehend uses to encrypt trained custom
+	// models. The ModelKmsKeyId can be either of the following formats:
 	//
 	//    * KMS Key ID: "1234abcd-12ab-34cd-56ef-1234567890ab"
 	//
@@ -28069,9 +28080,8 @@ type UpdateFlywheelInput struct {
 	// The Amazon Resource Number (ARN) of the active model version.
 	ActiveModelArn *string `type:"string"`
 
-	// The Amazon Resource Name (ARN) of the AWS Identity and Access Management
-	// (IAM) role that grants Amazon Comprehend permission to access the flywheel
-	// data.
+	// The Amazon Resource Name (ARN) of the IAM role that grants Amazon Comprehend
+	// permission to access the flywheel data.
 	DataAccessRoleArn *string `min:"20" type:"string"`
 
 	// Flywheel data security configuration.
@@ -28195,7 +28205,7 @@ type VpcConfig struct {
 	// The ID for each subnet being used in your private VPC. This subnet is a subset
 	// of the range of IPv4 addresses used by the VPC and is specific to a given
-	// availability zone in the VPC’s region. This ID number is preceded by "subnet-",
+	// availability zone in the VPC’s Region. This ID number is preceded by "subnet-",
 	// for instance: "subnet-04ccf456919e69055". For more information, see VPCs
 	// and Subnets (https://docs.aws.amazon.com/vpc/latest/userguide/VPC_Subnets.html).
 	//
 	//
@@ -28772,6 +28782,9 @@ const (
 
 	// ModelStatusTrained is a ModelStatus enum value
 	ModelStatusTrained = "TRAINED"
+
+	// ModelStatusTrainedWithWarning is a ModelStatus enum value
+	ModelStatusTrainedWithWarning = "TRAINED_WITH_WARNING"
 )
 
 // ModelStatus_Values returns all elements of the ModelStatus enum
@@ -28784,6 +28797,7 @@ func ModelStatus_Values() []string {
 		ModelStatusStopped,
 		ModelStatusInError,
 		ModelStatusTrained,
+		ModelStatusTrainedWithWarning,
 	}
 }
 
diff --git a/service/comprehend/doc.go b/service/comprehend/doc.go
index 351f0c8b440..9e26f63df2e 100644
--- a/service/comprehend/doc.go
+++ b/service/comprehend/doc.go
@@ -3,10 +3,10 @@
 // Package comprehend provides the client and types for making API
 // requests to Amazon Comprehend.
 //
-// Amazon Comprehend is an AWS service for gaining insight into the content
-// of documents. Use these actions to determine the topics contained in your
-// documents, the topics they discuss, the predominant sentiment expressed in
-// them, the predominant language used, and more.
+// Amazon Comprehend is an Amazon Web Services service for gaining insight into
+// the content of documents. Use these actions to determine the topics contained
+// in your documents, the topics they discuss, the predominant sentiment expressed
+// in them, the predominant language used, and more.
 //
 // See https://docs.aws.amazon.com/goto/WebAPI/comprehend-2017-11-27 for more information on this service.
 //
diff --git a/service/rds/api.go b/service/rds/api.go
index 7f6cd736ddc..6f9a174a0cd 100644
--- a/service/rds/api.go
+++ b/service/rds/api.go
@@ -1444,6 +1444,9 @@ func (c *RDS) CreateCustomDBEngineVersionRequest(input *CreateCustomDBEngineVers
 //   - ErrCodeKMSKeyNotAccessibleFault "KMSKeyNotAccessibleFault"
 //     An error occurred accessing an Amazon Web Services KMS key.
 //
+//   - ErrCodeCreateCustomDBEngineVersionFault "CreateCustomDBEngineVersionFault"
+//     An error occurred while trying to create the CEV.
+//
 // See also, https://docs.aws.amazon.com/goto/WebAPI/rds-2014-10-31/CreateCustomDBEngineVersion
 func (c *RDS) CreateCustomDBEngineVersion(input *CreateCustomDBEngineVersionInput) (*CreateCustomDBEngineVersionOutput, error) {
 	req, out := c.CreateCustomDBEngineVersionRequest(input)
@@ -1511,6 +1514,12 @@ func (c *RDS) CreateDBClusterRequest(input *CreateDBClusterInput) (req *request.
 //
 // Creates a new Amazon Aurora DB cluster or Multi-AZ DB cluster.
 //
+// If you create an Aurora DB cluster, the request creates an empty cluster.
+// You must explicitly create the writer instance for your DB cluster using
+// the CreateDBInstance (https://docs.aws.amazon.com/AmazonRDS/latest/APIReference/API_CreateDBInstance.html)
+// operation. If you create a Multi-AZ DB cluster, the request creates a writer
+// and two reader DB instances for you, each in a different Availability Zone.
+//
 // You can use the ReplicationSourceIdentifier parameter to create an Amazon
 // Aurora DB cluster as a read replica of another DB cluster or Amazon RDS MySQL
 // or PostgreSQL DB instance. For more information about Amazon Aurora, see
@@ -14944,11 +14953,11 @@ func (c *RDS) StartExportTaskRequest(input *StartExportTaskInput) (req *request.
 // For more information on exporting DB snapshot data, see Exporting DB snapshot
 // data to Amazon S3 (https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/USER_ExportSnapshot.html)
 // in the Amazon RDS User Guide or Exporting DB cluster snapshot data to Amazon
-// S3 (https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/aurora-export-snapshot.html)
+// S3 (https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/aurora-export-snapshot.html)
 // in the Amazon Aurora User Guide.
 //
 // For more information on exporting DB cluster data, see Exporting DB cluster
-// data to Amazon S3 (https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/export-cluster-data.html)
+// data to Amazon S3 (https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/export-cluster-data.html)
 // in the Amazon Aurora User Guide.
 //
 // Returns awserr.Error for service API and SDK errors. Use runtime type assertions
@@ -19754,9 +19763,7 @@ type CreateDBClusterInput struct {
 	//
 	// Valid Values:
 	//
-	//    * aurora (for MySQL 5.6-compatible Aurora)
-	//
-	//    * aurora-mysql (for MySQL 5.7-compatible and MySQL 8.0-compatible Aurora)
+	//    * aurora-mysql
 	//
 	//    * aurora-postgresql
 	//
 	// Engine is a required field
 	Engine *string `type:"string" required:"true"`
@@ -19769,23 +19776,10 @@ type CreateDBClusterInput struct {
-	// The DB engine mode of the DB cluster, either provisioned, serverless, parallelquery,
-	// global, or multimaster.
-	//
-	// The parallelquery engine mode isn't required for Aurora MySQL version 1.23
-	// and higher 1.x versions, and version 2.09 and higher 2.x versions.
-	//
-	// The global engine mode isn't required for Aurora MySQL version 1.22 and higher
-	// 1.x versions, and global engine mode isn't required for any 2.x versions.
-	//
-	// The multimaster engine mode only applies for DB clusters created with Aurora
-	// MySQL version 5.6.10a.
+	// The DB engine mode of the DB cluster, either provisioned or serverless.
 	//
 	// The serverless engine mode only applies for Aurora Serverless v1 DB clusters.
 	//
-	// For Aurora PostgreSQL, the global engine mode isn't required, and both the
-	// parallelquery and the multimaster engine modes currently aren't supported.
-	//
 	// Limitations and requirements apply to some DB engine modes. For more information,
 	// see the following sections in the Amazon Aurora User Guide:
 	//
 	//    * Limitations for Aurora Serverless v1 (https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/aurora-serverless.html#aurora-serverless.limitations)
 	//
 	//    * Requirements for Aurora Serverless v2 (https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/aurora-serverless-v2.requirements.html)
 	//
-	//    * Limitations of Parallel Query (https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/aurora-mysql-parallel-query.html#aurora-mysql-parallel-query-limitations)
-	//
-	//    * Limitations of Aurora Global Databases (https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/aurora-global-database.html#aurora-global-database.limitations)
+	//    * Limitations of parallel query (https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/aurora-mysql-parallel-query.html#aurora-mysql-parallel-query-limitations)
 	//
-	//    * Limitations of Multi-Master Clusters (https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/aurora-multi-master.html#aurora-multi-master-limitations)
+	//    * Limitations of Aurora global databases (https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/aurora-global-database.html#aurora-global-database.limitations)
 	//
 	// Valid for: Aurora DB clusters only
 	EngineMode *string `type:"string"`
 
 	// The version number of the database engine to use.
 	//
-	// To list all of the available engine versions for MySQL 5.6-compatible Aurora,
-	// use the following command:
-	//
-	// aws rds describe-db-engine-versions --engine aurora --query "DBEngineVersions[].EngineVersion"
-	//
-	// To list all of the available engine versions for MySQL 5.7-compatible and
-	// MySQL 8.0-compatible Aurora, use the following command:
+	// To list all of the available engine versions for Aurora MySQL version 2 (5.7-compatible)
+	// and version 3 (MySQL 8.0-compatible), use the following command:
 	//
 	// aws rds describe-db-engine-versions --engine aurora-mysql --query "DBEngineVersions[].EngineVersion"
 	//
+	// You can supply either 5.7 or 8.0 to use the default engine version for Aurora
+	// MySQL version 2 or version 3, respectively.
+	//
 	// To list all of the available engine versions for Aurora PostgreSQL, use the
 	// following command:
 	//
@@ -19831,7 +19821,7 @@ type CreateDBClusterInput struct {
 	//
 	// Aurora MySQL
 	//
-	// For information, see MySQL on Amazon RDS Versions (https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/AuroraMySQL.Updates.html)
+	// For information, see Database engine updates for Amazon Aurora MySQL (https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/AuroraMySQL.Updates.html)
 	// in the Amazon Aurora User Guide.
 	//
 	// Aurora PostgreSQL
@@ -19842,12 +19832,12 @@ type CreateDBClusterInput struct {
 	//
 	// MySQL
 	//
-	// For information, see MySQL on Amazon RDS Versions (https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_MySQL.html#MySQL.Concepts.VersionMgmt)
+	// For information, see Amazon RDS for MySQL (https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_MySQL.html#MySQL.Concepts.VersionMgmt)
 	// in the Amazon RDS User Guide.
 	//
 	// PostgreSQL
 	//
-	// For information, see Amazon RDS for PostgreSQL versions and extensions (https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_PostgreSQL.html#PostgreSQL.Concepts)
+	// For information, see Amazon RDS for PostgreSQL (https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_PostgreSQL.html#PostgreSQL.Concepts)
 	// in the Amazon RDS User Guide.
 	//
 	// Valid for: Aurora DB clusters and Multi-AZ DB clusters
@@ -19862,8 +19852,7 @@ type CreateDBClusterInput struct {
 	// The amount of Provisioned IOPS (input/output operations per second) to be
 	// initially allocated for each DB instance in the Multi-AZ DB cluster.
 	//
-	// For information about valid IOPS values, see Amazon RDS Provisioned IOPS
-	// storage (https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_Storage.html#USER_PIOPS)
+	// For information about valid IOPS values, see Provisioned IOPS storage (https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_Storage.html#USER_PIOPS)
 	// in the Amazon RDS User Guide.
 	//
 	// This setting is required to create a Multi-AZ DB cluster.
@@ -32709,8 +32698,6 @@ type DescribeDBEngineVersionsInput struct {
 	//
 	// Valid Values:
 	//
-	//    * aurora (for MySQL 5.6-compatible Aurora)
-	//
 	//    * aurora-mysql (for MySQL 5.7-compatible and MySQL 8.0-compatible Aurora)
 	//
 	//    * aurora-postgresql
@@ -36378,8 +36365,6 @@ type DescribeOrderableDBInstanceOptionsInput struct {
 	//
 	// Valid Values:
 	//
-	//    * aurora (for MySQL 5.6-compatible Aurora)
-	//
 	//    * aurora-mysql (for MySQL 5.7-compatible and MySQL 8.0-compatible Aurora)
 	//
 	//    * aurora-postgresql
@@ -38620,8 +38605,7 @@ type GlobalCluster struct {
 	// is the unique key that identifies a global database cluster.
 	GlobalClusterIdentifier *string `type:"string"`
 
-	// The list of cluster IDs for secondary clusters within the global database
-	// cluster. Currently limited to 1 item.
+	// The list of primary and secondary clusters within the global database cluster.
 	GlobalClusterMembers []*GlobalClusterMember `locationNameList:"GlobalClusterMember" type:"list"`
 
 	// The Amazon Web Services Region-unique, immutable identifier for the global
@@ -40302,13 +40286,8 @@ type ModifyDBClusterInput struct {
 	// this parameter results in an outage. The change is applied during the next
 	// maintenance window unless ApplyImmediately is enabled.
 	//
-	// To list all of the available engine versions for MySQL 5.6-compatible Aurora,
-	// use the following command:
-	//
-	// aws rds describe-db-engine-versions --engine aurora --query "DBEngineVersions[].EngineVersion"
-	//
-	// To list all of the available engine versions for MySQL 5.7-compatible and
-	// MySQL 8.0-compatible Aurora, use the following command:
+	// To list all of the available engine versions for Aurora MySQL version 2 (5.7-compatible)
+	// and version 3 (MySQL 8.0-compatible), use the following command:
 	//
 	// aws rds describe-db-engine-versions --engine aurora-mysql --query "DBEngineVersions[].EngineVersion"
 	//
@@ -47052,10 +47031,6 @@ type RestoreDBClusterFromS3Input struct {
 	//
 	// Possible values are audit, error, general, and slowquery.
 	//
-	// Aurora PostgreSQL
-	//
-	// Possible value is postgresql.
-	//
 	// For more information about exporting CloudWatch Logs for Amazon Aurora, see
 	// Publishing Database Logs to Amazon CloudWatch Logs (https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/USER_LogAccess.html#USER_LogAccess.Procedural.UploadtoCloudWatch)
 	// in the Amazon Aurora User Guide.
@@ -47071,27 +47046,22 @@ type RestoreDBClusterFromS3Input struct {
 	// The name of the database engine to be used for this DB cluster.
 	//
-	// Valid Values: aurora (for MySQL 5.6-compatible Aurora) and aurora-mysql (for
-	// MySQL 5.7-compatible and MySQL 8.0-compatible Aurora)
+	// Valid Values: aurora-mysql (for MySQL 5.7-compatible and MySQL 8.0-compatible
+	// Aurora)
 	//
 	// Engine is a required field
 	Engine *string `type:"string" required:"true"`
 
 	// The version number of the database engine to use.
 	//
-	// To list all of the available engine versions for aurora (for MySQL 5.6-compatible
-	// Aurora), use the following command:
-	//
-	// aws rds describe-db-engine-versions --engine aurora --query "DBEngineVersions[].EngineVersion"
-	//
-	// To list all of the available engine versions for aurora-mysql (for MySQL
-	// 5.7-compatible and MySQL 8.0-compatible Aurora), use the following command:
+	// To list all of the available engine versions for aurora-mysql (MySQL 5.7-compatible
+	// and MySQL 8.0-compatible Aurora), use the following command:
 	//
 	// aws rds describe-db-engine-versions --engine aurora-mysql --query "DBEngineVersions[].EngineVersion"
 	//
 	// Aurora MySQL
 	//
-	// Example: 5.6.10a, 5.6.mysql_aurora.1.19.2, 5.7.mysql_aurora.2.07.1, 8.0.mysql_aurora.3.02.0
+	// Examples: 5.7.mysql_aurora.2.07.1, 8.0.mysql_aurora.3.02.0
 	EngineVersion *string `type:"string"`
 
 	// The Amazon Web Services KMS key identifier for an encrypted DB cluster.
@@ -47763,12 +47733,9 @@ type RestoreDBClusterFromSnapshotInput struct {
 	// Valid for: Aurora DB clusters only
 	EngineMode *string `type:"string"`
 
-	// The version of the database engine to use for the new DB cluster.
-	//
-	// To list all of the available engine versions for MySQL 5.6-compatible Aurora,
-	// use the following command:
-	//
-	// aws rds describe-db-engine-versions --engine aurora --query "DBEngineVersions[].EngineVersion"
+	// The version of the database engine to use for the new DB cluster. If you
+	// don't specify an engine version, the default version for the database engine
+	// in the Amazon Web Services Region is used.
 	//
 	// To list all of the available engine versions for MySQL 5.7-compatible and
 	// MySQL 8.0-compatible Aurora, use the following command:
@@ -47792,7 +47759,7 @@ type RestoreDBClusterFromSnapshotInput struct {
 	//
 	// Aurora MySQL
 	//
-	// See MySQL on Amazon RDS Versions (https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/AuroraMySQL.Updates.html)
+	// See Database engine updates for Amazon Aurora MySQL (https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/AuroraMySQL.Updates.html)
 	// in the Amazon Aurora User Guide.
 	//
 	// Aurora PostgreSQL
@@ -47802,7 +47769,7 @@ type RestoreDBClusterFromSnapshotInput struct {
 	//
 	// MySQL
 	//
-	// See MySQL on Amazon RDS Versions (https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_MySQL.html#MySQL.Concepts.VersionMgmt)
+	// See Amazon RDS for MySQL (https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_MySQL.html#MySQL.Concepts.VersionMgmt)
 	// in the Amazon RDS User Guide.
 	//
 	// PostgreSQL
@@ -52024,6 +51991,28 @@ type StartExportTaskInput struct {
 	// The name of the IAM role to use for writing to the Amazon S3 bucket when
 	// exporting a snapshot or cluster.
 	//
+	// In the IAM policy attached to your IAM role, include the following required
+	// actions to allow the transfer of files from Amazon RDS or Amazon Aurora to
+	// an S3 bucket:
+	//
+	//    * s3:PutObject*
+	//
+	//    * s3:GetObject*
+	//
+	//    * s3:ListBucket
+	//
+	//    * s3:DeleteObject*
+	//
+	//    * s3:GetBucketLocation
+	//
+	// In the policy, include the resources to identify the S3 bucket and objects
+	// in the bucket. The following list of resources shows the Amazon Resource
+	// Name (ARN) format for accessing S3:
+	//
+	//    * arn:aws:s3:::your-s3-bucket
+	//
+	//    * arn:aws:s3:::your-s3-bucket/*
+	//
 	// IamRoleArn is a required field
 	IamRoleArn *string `type:"string" required:"true"`
 
@@ -52924,16 +52913,19 @@ type SwitchoverDetail struct {
 	//
 	// Values:
 	//
-	//    * preparing-for-switchover - The resource is being prepared to switch
-	//    over.
+	//    * PROVISIONING - The resource is being prepared to switch over.
+	//
+	//    * AVAILABLE - The resource is ready to switch over.
+	//
+	//    * SWITCHOVER_IN_PROGRESS - The resource is being switched over.
 	//
-	//    * ready-for-switchover - The resource is ready to switch over.
+	//    * SWITCHOVER_COMPLETED - The resource has been switched over.
 	//
-	//    * switchover-in-progress - The resource is being switched over.
+	//    * SWITCHOVER_FAILED - The resource attempted to switch over but failed.
 	//
-	//    * switchover-completed - The resource has been switched over.
+	//    * MISSING_SOURCE - The source resource has been deleted.
 	//
-	//    * switchover-failed - The resource attempted to switch over but failed.
+	//    * MISSING_TARGET - The target resource has been deleted.
 	Status *string `type:"string"`
 
 	// The Amazon Resource Name (ARN) of a resource in the green environment.
diff --git a/service/rds/errors.go b/service/rds/errors.go
index 5c50726f6ea..6027561b20f 100644
--- a/service/rds/errors.go
+++ b/service/rds/errors.go
@@ -49,6 +49,12 @@ const (
 	// CertificateIdentifier doesn't refer to an existing certificate.
 	ErrCodeCertificateNotFoundFault = "CertificateNotFound"
 
+	// ErrCodeCreateCustomDBEngineVersionFault for service response error code
+	// "CreateCustomDBEngineVersionFault".
+	//
+	// An error occurred while trying to create the CEV.
+	ErrCodeCreateCustomDBEngineVersionFault = "CreateCustomDBEngineVersionFault"
+
 	// ErrCodeCustomAvailabilityZoneNotFoundFault for service response error code
 	// "CustomAvailabilityZoneNotFound".
 	//
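
Reviewer note: below is a minimal usage sketch, not part of the generated patch above, illustrating the two user-visible additions in this release: the new RDS error code CreateCustomDBEngineVersionFault and the new Comprehend model status TRAINED_WITH_WARNING. The engine name, CEV version, and classifier ARN are placeholder values, and credentials and Region come from the default session configuration.

package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/awserr"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/comprehend"
	"github.com/aws/aws-sdk-go/service/rds"
)

func main() {
	sess := session.Must(session.NewSession())

	// New in v1.44.229: CreateCustomDBEngineVersionFault is returned when
	// creating a custom engine version (CEV) fails.
	rdsSvc := rds.New(sess)
	_, err := rdsSvc.CreateCustomDBEngineVersion(&rds.CreateCustomDBEngineVersionInput{
		Engine:        aws.String("custom-oracle-ee"), // placeholder engine name
		EngineVersion: aws.String("19.my_cev1"),       // placeholder CEV version
	})
	if aerr, ok := err.(awserr.Error); ok {
		switch aerr.Code() {
		case rds.ErrCodeCreateCustomDBEngineVersionFault:
			fmt.Println("CEV creation failed:", aerr.Message())
		case rds.ErrCodeKMSKeyNotAccessibleFault:
			fmt.Println("KMS key not accessible:", aerr.Message())
		default:
			fmt.Println("request failed:", aerr.Error())
		}
	}

	// Also new in v1.44.229: ModelStatus gains TRAINED_WITH_WARNING, so status
	// checks that treat TRAINED as the only successful terminal state may need
	// updating.
	compSvc := comprehend.New(sess)
	out, err := compSvc.DescribeDocumentClassifier(&comprehend.DescribeDocumentClassifierInput{
		DocumentClassifierArn: aws.String("arn:aws:comprehend:us-east-1:111122223333:document-classifier/example"), // placeholder ARN
	})
	if err == nil && out.DocumentClassifierProperties != nil {
		switch aws.StringValue(out.DocumentClassifierProperties.Status) {
		case comprehend.ModelStatusTrained:
			fmt.Println("model trained cleanly")
		case comprehend.ModelStatusTrainedWithWarning:
			fmt.Println("model trained with warnings; review before promoting")
		}
	}
}

Callers that switch on Comprehend model statuses or on RDS error codes may need to account for these new values when upgrading to v1.44.229.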