From f3f7488c2060cb64f88b2c1ce49d683b609b06c9 Mon Sep 17 00:00:00 2001 From: aws-sdk-go-automation <43143561+aws-sdk-go-automation@users.noreply.github.com> Date: Mon, 22 Feb 2021 11:19:44 -0800 Subject: [PATCH] Release v1.37.16 (2021-02-22) (#3795) Release v1.37.16 (2021-02-22) === ### Service Client Updates * `service/runtime.sagemaker`: Updates service API and documentation * `service/sagemaker`: Updates service API and documentation * Amazon SageMaker now supports core dump for SageMaker Endpoints and direct invocation of a single container in a SageMaker Endpoint that hosts multiple containers. --- CHANGELOG.md | 8 + aws/version.go | 2 +- .../runtime.sagemaker/2017-05-13/api-2.json | 10 + .../runtime.sagemaker/2017-05-13/docs-2.json | 6 + models/apis/sagemaker/2017-07-24/api-2.json | 27 ++- models/apis/sagemaker/2017-07-24/docs-2.json | 29 ++- service/sagemaker/api.go | 181 +++++++++++++++++- service/sagemakerruntime/api.go | 10 + 8 files changed, 263 insertions(+), 10 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4891592a184..158296130fb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,11 @@ +Release v1.37.16 (2021-02-22) +=== + +### Service Client Updates +* `service/runtime.sagemaker`: Updates service API and documentation +* `service/sagemaker`: Updates service API and documentation + * Amazon SageMaker now supports core dump for SageMaker Endpoints and direct invocation of a single container in a SageMaker Endpoint that hosts multiple containers. 
+ Release v1.37.15 (2021-02-19) === diff --git a/aws/version.go b/aws/version.go index fe73428eb24..d00ac003418 100644 --- a/aws/version.go +++ b/aws/version.go @@ -5,4 +5,4 @@ package aws const SDKName = "aws-sdk-go" // SDKVersion is the version of this SDK -const SDKVersion = "1.37.15" +const SDKVersion = "1.37.16" diff --git a/models/apis/runtime.sagemaker/2017-05-13/api-2.json b/models/apis/runtime.sagemaker/2017-05-13/api-2.json index 82c0208afa2..a15bb58e01b 100644 --- a/models/apis/runtime.sagemaker/2017-05-13/api-2.json +++ b/models/apis/runtime.sagemaker/2017-05-13/api-2.json @@ -104,6 +104,11 @@ "location":"header", "locationName":"X-Amzn-SageMaker-Target-Variant" }, + "TargetContainerHostname":{ + "shape":"TargetContainerHostnameHeader", + "location":"header", + "locationName":"X-Amzn-SageMaker-Target-Container-Hostname" + }, "InferenceId":{ "shape":"InferenceId", "location":"header", @@ -162,6 +167,11 @@ "synthetic":true }, "StatusCode":{"type":"integer"}, + "TargetContainerHostnameHeader":{ + "type":"string", + "max":63, + "pattern":"^[a-zA-Z0-9](-*[a-zA-Z0-9])*" + }, "TargetModelHeader":{ "type":"string", "max":1024, diff --git a/models/apis/runtime.sagemaker/2017-05-13/docs-2.json b/models/apis/runtime.sagemaker/2017-05-13/docs-2.json index 9b482015823..8683ba57fd6 100644 --- a/models/apis/runtime.sagemaker/2017-05-13/docs-2.json +++ b/models/apis/runtime.sagemaker/2017-05-13/docs-2.json @@ -87,6 +87,12 @@ "ModelError$OriginalStatusCode": "
Original status code.
" } }, + "TargetContainerHostnameHeader": { + "base": null, + "refs": { + "InvokeEndpointInput$TargetContainerHostname": "If the endpoint hosts multiple containers and is configured to use direct invocation, this parameter specifies the host name of the container to invoke.
" + } + }, "TargetModelHeader": { "base": null, "refs": { diff --git a/models/apis/sagemaker/2017-07-24/api-2.json b/models/apis/sagemaker/2017-07-24/api-2.json index 99edd4969e1..a204038be21 100644 --- a/models/apis/sagemaker/2017-07-24/api-2.json +++ b/models/apis/sagemaker/2017-07-24/api-2.json @@ -4624,6 +4624,7 @@ "ModelName":{"shape":"ModelName"}, "PrimaryContainer":{"shape":"ContainerDefinition"}, "Containers":{"shape":"ContainerDefinitionList"}, + "InferenceExecutionConfig":{"shape":"InferenceExecutionConfig"}, "ExecutionRoleArn":{"shape":"RoleArn"}, "Tags":{"shape":"TagList"}, "VpcConfig":{"shape":"VpcConfig"}, @@ -6460,6 +6461,7 @@ "ModelName":{"shape":"ModelName"}, "PrimaryContainer":{"shape":"ContainerDefinition"}, "Containers":{"shape":"ContainerDefinitionList"}, + "InferenceExecutionConfig":{"shape":"InferenceExecutionConfig"}, "ExecutionRoleArn":{"shape":"RoleArn"}, "VpcConfig":{"shape":"VpcConfig"}, "CreationTime":{"shape":"Timestamp"}, @@ -8569,6 +8571,20 @@ "type":"list", "member":{"shape":"Image"} }, + "InferenceExecutionConfig":{ + "type":"structure", + "required":["Mode"], + "members":{ + "Mode":{"shape":"InferenceExecutionMode"} + } + }, + "InferenceExecutionMode":{ + "type":"string", + "enum":[ + "Serial", + "Direct" + ] + }, "InferenceSpecification":{ "type":"structure", "required":[ @@ -11999,7 +12015,8 @@ "InitialInstanceCount":{"shape":"TaskCount"}, "InstanceType":{"shape":"ProductionVariantInstanceType"}, "InitialVariantWeight":{"shape":"VariantWeight"}, - "AcceleratorType":{"shape":"ProductionVariantAcceleratorType"} + "AcceleratorType":{"shape":"ProductionVariantAcceleratorType"}, + "CoreDumpConfig":{"shape":"ProductionVariantCoreDumpConfig"} } }, "ProductionVariantAcceleratorType":{ @@ -12013,6 +12030,14 @@ "ml.eia2.xlarge" ] }, + "ProductionVariantCoreDumpConfig":{ + "type":"structure", + "required":["DestinationS3Uri"], + "members":{ + "DestinationS3Uri":{"shape":"DestinationS3Uri"}, + "KmsKeyId":{"shape":"KmsKeyId"} + } + }, 
"ProductionVariantInstanceType":{ "type":"string", "enum":[ diff --git a/models/apis/sagemaker/2017-07-24/docs-2.json b/models/apis/sagemaker/2017-07-24/docs-2.json index 2a559b8c995..a9bacd0ab5c 100644 --- a/models/apis/sagemaker/2017-07-24/docs-2.json +++ b/models/apis/sagemaker/2017-07-24/docs-2.json @@ -2995,7 +2995,8 @@ "base": null, "refs": { "DataCaptureConfig$DestinationS3Uri": "", - "DataCaptureConfigSummary$DestinationS3Uri": "" + "DataCaptureConfigSummary$DestinationS3Uri": "", + "ProductionVariantCoreDumpConfig$DestinationS3Uri": "The Amazon S3 bucket to send the core dump to.
" } }, "DetailedAlgorithmStatus": { @@ -4618,6 +4619,19 @@ "ListImagesResponse$Images": "A list of images and their properties.
" } }, + "InferenceExecutionConfig": { + "base": "Specifies details about how containers in a multi-container endpoint are run.
", + "refs": { + "CreateModelInput$InferenceExecutionConfig": "Specifies details of how containers in a multi-container endpoint are called.
", + "DescribeModelOutput$InferenceExecutionConfig": "Specifies details of how containers in a multi-container endpoint are called.
" } }, + "InferenceExecutionMode": { + "base": null, + "refs": { + "InferenceExecutionConfig$Mode": "How containers in a multi-container endpoint are run. The following values are valid.
SERIAL
- Containers run as a serial pipeline.
DIRECT
- Only the individual container that you specify is run.
Defines how to perform inference generation after a training job is run.
", "refs": { @@ -4817,9 +4831,10 @@ "MonitoringOutputConfig$KmsKeyId": "The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using Amazon S3 server-side encryption.
", "OnlineStoreSecurityConfig$KmsKeyId": "The ID of the AWS Key Management Service (AWS KMS) key that SageMaker Feature Store uses to encrypt the Amazon S3 objects at rest using Amazon S3 server-side encryption.
The caller (either IAM user or IAM role) of CreateFeatureGroup
must have below permissions to the OnlineStore
KmsKeyId
:
\"kms:Encrypt\"
\"kms:Decrypt\"
\"kms:DescribeKey\"
\"kms:CreateGrant\"
\"kms:RetireGrant\"
\"kms:ReEncryptFrom\"
\"kms:ReEncryptTo\"
\"kms:GenerateDataKey\"
\"kms:ListAliases\"
\"kms:ListGrants\"
\"kms:RevokeGrant\"
The caller (either IAM user or IAM role) to all DataPlane operations (PutRecord
, GetRecord
, DeleteRecord
) must have the following permissions to the KmsKeyId
:
\"kms:Decrypt\"
The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume after compilation job. If you don't provide a KMS key ID, Amazon SageMaker uses the default KMS key for Amazon S3 for your role's account
The KmsKeyId can be any of the following formats:
Key ID: 1234abcd-12ab-34cd-56ef-1234567890ab
Key ARN: arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab
Alias name: alias/ExampleAlias
Alias name ARN: arn:aws:kms:us-west-2:111122223333:alias/ExampleAlias
The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using Amazon S3 server-side encryption. The KmsKeyId
can be any of the following formats:
// KMS Key ID
\"1234abcd-12ab-34cd-56ef-1234567890ab\"
// Amazon Resource Name (ARN) of a KMS Key
\"arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab\"
// KMS Key Alias
\"alias/ExampleAlias\"
// Amazon Resource Name (ARN) of a KMS Key Alias
\"arn:aws:kms:us-west-2:111122223333:alias/ExampleAlias\"
If you use a KMS key ID or an alias of your master key, the Amazon SageMaker execution role must include permissions to call kms:Encrypt
. If you don't provide a KMS key ID, Amazon SageMaker uses the default KMS key for Amazon S3 for your role's account. Amazon SageMaker uses server-side encryption with KMS-managed keys for OutputDataConfig
. If you use a bucket policy with an s3:PutObject
permission that only allows objects with server-side encryption, set the condition key of s3:x-amz-server-side-encryption
to \"aws:kms\"
. For more information, see KMS-Managed Encryption Keys in the Amazon Simple Storage Service Developer Guide.
The KMS key policy must grant permission to the IAM role that you specify in your CreateTrainingJob
, CreateTransformJob
, or CreateHyperParameterTuningJob
requests. For more information, see Using Key Policies in AWS KMS in the AWS Key Management Service Developer Guide.
The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using Amazon S3 server-side encryption. The KmsKeyId
can be any of the following formats:
// KMS Key ID
\"1234abcd-12ab-34cd-56ef-1234567890ab\"
// Amazon Resource Name (ARN) of a KMS Key
\"arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab\"
// KMS Key Alias
\"alias/ExampleAlias\"
// Amazon Resource Name (ARN) of a KMS Key Alias
\"arn:aws:kms:us-west-2:111122223333:alias/ExampleAlias\"
If you use a KMS key ID or an alias of your master key, the Amazon SageMaker execution role must include permissions to call kms:Encrypt
. If you don't provide a KMS key ID, Amazon SageMaker uses the default KMS key for Amazon S3 for your role's account. Amazon SageMaker uses server-side encryption with KMS-managed keys for OutputDataConfig
. If you use a bucket policy with an s3:PutObject
permission that only allows objects with server-side encryption, set the condition key of s3:x-amz-server-side-encryption
to \"aws:kms\"
. For more information, see KMS-Managed Encryption Keys in the Amazon Simple Storage Service Developer Guide.
The KMS key policy must grant permission to the IAM role that you specify in your CreateTrainingJob
, CreateTransformJob
, or CreateHyperParameterTuningJob
requests. For more information, see Using Key Policies in AWS KMS in the AWS Key Management Service Developer Guide.
The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s) that run the processing job.
", "ProcessingOutputConfig$KmsKeyId": "The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the processing job output. KmsKeyId
can be an ID of a KMS key, ARN of a KMS key, alias of a KMS key, or alias of a KMS key. The KmsKeyId
is applied to all outputs.
The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the core dump data at rest using Amazon S3 server-side encryption. The KmsKeyId
can be any of the following formats:
// KMS Key ID
\"1234abcd-12ab-34cd-56ef-1234567890ab\"
// Amazon Resource Name (ARN) of a KMS Key
\"arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab\"
// KMS Key Alias
\"alias/ExampleAlias\"
// Amazon Resource Name (ARN) of a KMS Key Alias
\"arn:aws:kms:us-west-2:111122223333:alias/ExampleAlias\"
If you use a KMS key ID or an alias of your master key, the Amazon SageMaker execution role must include permissions to call kms:Encrypt
. If you don't provide a KMS key ID, Amazon SageMaker uses the default KMS key for Amazon S3 for your role's account. Amazon SageMaker uses server-side encryption with KMS-managed keys for OutputDataConfig
. If you use a bucket policy with an s3:PutObject
permission that only allows objects with server-side encryption, set the condition key of s3:x-amz-server-side-encryption
to \"aws:kms\"
. For more information, see KMS-Managed Encryption Keys in the Amazon Simple Storage Service Developer Guide.
The KMS key policy must grant permission to the IAM role that you specify in your CreateEndpoint
and UpdateEndpoint
requests. For more information, see Using Key Policies in AWS KMS in the AWS Key Management Service Developer Guide.
The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data from a Redshift execution.
", "ResourceConfig$VolumeKmsKeyId": "The AWS KMS key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s) that run the training job.
Certain Nitro-based instances include local storage, dependent on the instance type. Local storage volumes are encrypted using a hardware module on the instance. You can't request a VolumeKmsKeyId
when using an instance type with local storage.
For a list of instance types that support local instance storage, see Instance Store Volumes.
For more information about local instance storage encryption, see SSD Instance Store Volumes.
The VolumeKmsKeyId
can be in any of the following formats:
// KMS Key ID
\"1234abcd-12ab-34cd-56ef-1234567890ab\"
// Amazon Resource Name (ARN) of a KMS Key
\"arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab\"
The AWS Key Management Service (KMS) key ID of the key used to encrypt any objects written into the OfflineStore
S3 location.
The IAM roleARN
that is passed as a parameter to CreateFeatureGroup
must have below permissions to the KmsKeyId
:
\"kms:GenerateDataKey\"
The size of the Elastic Inference (EI) instance to use for the production variant. EI instances provide on-demand GPU computing for inference. For more information, see Using Elastic Inference in Amazon SageMaker.
" } }, + "ProductionVariantCoreDumpConfig": { + "base": "Specifies configuration for a core dump from the model container when the process crashes.
", + "refs": { + "ProductionVariant$CoreDumpConfig": "Specifies configuration for a core dump from the model container when the process crashes.
" + } + }, "ProductionVariantInstanceType": { "base": null, "refs": { @@ -8917,7 +8938,7 @@ "TaskAvailabilityLifetimeInSeconds": { "base": null, "refs": { - "HumanTaskConfig$TaskAvailabilityLifetimeInSeconds": "The length of time that a task remains available for labeling by human workers. The default and maximum values for this parameter depend on the type of workforce you use.
If you choose the Amazon Mechanical Turk workforce, the maximum is 12 hours (43,200 seconds). The default is 6 hours (21,600 seconds).
If you choose a private or vendor workforce, the default value is 10 days (864,000 seconds). For most users, the maximum is also 10 days.
The length of time that a task remains available for labeling by human workers. The default and maximum values for this parameter depend on the type of workforce you use.
If you choose the Amazon Mechanical Turk workforce, the maximum is 12 hours (43,200 seconds). The default is 6 hours (21,600 seconds).
If you choose a private or vendor workforce, the default value is 10 days (864,000 seconds). For most users, the maximum is also 10 days. If you want to change this limit, contact AWS Support.
The amount of time that a worker has to complete a task.
If you create a custom labeling job, the maximum value for this parameter is 8 hours (28,800 seconds).
If you create a labeling job using a built-in task type the maximum for this parameter depends on the task type you use:
For image and text labeling jobs, the maximum is 8 hours (28,800 seconds).
For 3D point cloud and video frame labeling jobs, the maximum is 7 days (604,800 seconds).
The amount of time that a worker has to complete a task.
If you create a custom labeling job, the maximum value for this parameter is 8 hours (28,800 seconds).
If you create a labeling job using a built-in task type the maximum for this parameter depends on the task type you use:
For image and text labeling jobs, the maximum is 8 hours (28,800 seconds).
For 3D point cloud and video frame labeling jobs, the maximum is 7 days (604,800 seconds). If you want to change these limits, contact AWS Support.