Skip to content

Commit

Permalink
feat(client-bedrock-agent): Add support for new model types for Bedroc…
Browse files Browse the repository at this point in the history
…k Agents, adding inference profile support for Flows and Prompt Management, and adding a new field to configure additional inference configurations for Flows and Prompt Management
  • Loading branch information
awstools committed Oct 25, 2024
1 parent e45fde3 commit 5cf2e4b
Show file tree
Hide file tree
Showing 13 changed files with 119 additions and 24 deletions.
2 changes: 1 addition & 1 deletion clients/client-bedrock-agent/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ AWS SDK for JavaScript BedrockAgent Client for Node.js, Browser and React Native

## Installing

To install the this package, simply type add or install @aws-sdk/client-bedrock-agent
To install this package, simply type add or install @aws-sdk/client-bedrock-agent
using your favorite package manager:

- `npm install @aws-sdk/client-bedrock-agent`
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,7 @@ export interface CreateFlowCommandOutput extends CreateFlowResponse, __MetadataB
* ],
* },
* },
* additionalModelRequestFields: "DOCUMENT_VALUE",
* },
* },
* },
Expand Down Expand Up @@ -226,6 +227,7 @@ export interface CreateFlowCommandOutput extends CreateFlowResponse, __MetadataB
* // ],
* // },
* // },
* // additionalModelRequestFields: "DOCUMENT_VALUE",
* // },
* // },
* // },
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,7 @@ export interface CreateFlowVersionCommandOutput extends CreateFlowVersionRespons
* // ],
* // },
* // },
* // additionalModelRequestFields: "DOCUMENT_VALUE",
* // },
* // },
* // },
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,7 @@ export interface CreatePromptCommandOutput extends CreatePromptResponse, __Metad
* value: "STRING_VALUE", // required
* },
* ],
* additionalModelRequestFields: "DOCUMENT_VALUE",
* },
* ],
* clientToken: "STRING_VALUE",
Expand Down Expand Up @@ -121,6 +122,7 @@ export interface CreatePromptCommandOutput extends CreatePromptResponse, __Metad
* // value: "STRING_VALUE", // required
* // },
* // ],
* // additionalModelRequestFields: "DOCUMENT_VALUE",
* // },
* // ],
* // id: "STRING_VALUE", // required
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,7 @@ export interface CreatePromptVersionCommandOutput extends CreatePromptVersionRes
* // value: "STRING_VALUE", // required
* // },
* // ],
* // additionalModelRequestFields: "DOCUMENT_VALUE",
* // },
* // ],
* // id: "STRING_VALUE", // required
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -103,6 +103,7 @@ export interface GetFlowCommandOutput extends GetFlowResponse, __MetadataBearer
* // ],
* // },
* // },
* // additionalModelRequestFields: "DOCUMENT_VALUE",
* // },
* // },
* // },
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -107,6 +107,7 @@ export interface GetFlowVersionCommandOutput extends GetFlowVersionResponse, __M
* // ],
* // },
* // },
* // additionalModelRequestFields: "DOCUMENT_VALUE",
* // },
* // },
* // },
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,7 @@ export interface GetPromptCommandOutput extends GetPromptResponse, __MetadataBea
* // value: "STRING_VALUE", // required
* // },
* // ],
* // additionalModelRequestFields: "DOCUMENT_VALUE",
* // },
* // ],
* // id: "STRING_VALUE", // required
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,7 @@ export interface UpdateFlowCommandOutput extends UpdateFlowResponse, __MetadataB
* ],
* },
* },
* additionalModelRequestFields: "DOCUMENT_VALUE",
* },
* },
* },
Expand Down Expand Up @@ -223,6 +224,7 @@ export interface UpdateFlowCommandOutput extends UpdateFlowResponse, __MetadataB
* // ],
* // },
* // },
* // additionalModelRequestFields: "DOCUMENT_VALUE",
* // },
* // },
* // },
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,7 @@ export interface UpdatePromptCommandOutput extends UpdatePromptResponse, __Metad
* value: "STRING_VALUE", // required
* },
* ],
* additionalModelRequestFields: "DOCUMENT_VALUE",
* },
* ],
* promptIdentifier: "STRING_VALUE", // required
Expand Down Expand Up @@ -118,6 +119,7 @@ export interface UpdatePromptCommandOutput extends UpdatePromptResponse, __Metad
* // value: "STRING_VALUE", // required
* // },
* // ],
* // additionalModelRequestFields: "DOCUMENT_VALUE",
* // },
* // ],
* // id: "STRING_VALUE", // required
Expand Down
72 changes: 61 additions & 11 deletions clients/client-bedrock-agent/src/models/models_0.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
// smithy-typescript generated code
import { ExceptionOptionType as __ExceptionOptionType, SENSITIVE_STRING } from "@smithy/smithy-client";

import { DocumentType as __DocumentType } from "@smithy/types";

import { BedrockAgentServiceException as __BaseException } from "./BedrockAgentServiceException";

/**
Expand Down Expand Up @@ -1648,7 +1650,25 @@ export interface CreateAgentRequest {
instruction?: string;

/**
* <p>The Amazon Resource Name (ARN) of the foundation model to be used for orchestration by the agent you create.</p>
* <p>The identifier for the model that you want to be used for orchestration by the agent you create.</p>
* <p>The <code>modelId</code> to provide depends on the type of model or throughput that you use:</p>
* <ul>
* <li>
* <p>If you use a base model, specify the model ID or its ARN. For a list of model IDs for base models, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-ids.html#model-ids-arns">Amazon Bedrock base model IDs (on-demand throughput)</a> in the Amazon Bedrock User Guide.</p>
* </li>
* <li>
* <p>If you use an inference profile, specify the inference profile ID or its ARN. For a list of inference profile IDs, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/cross-region-inference-support.html">Supported Regions and models for cross-region inference</a> in the Amazon Bedrock User Guide.</p>
* </li>
* <li>
* <p>If you use a provisioned model, specify the ARN of the Provisioned Throughput. For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/prov-thru-use.html">Run inference using a Provisioned Throughput</a> in the Amazon Bedrock User Guide.</p>
* </li>
* <li>
* <p>If you use a custom model, first purchase Provisioned Throughput for it. Then specify the ARN of the resulting provisioned model. For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-customization-use.html">Use a custom model in Amazon Bedrock</a> in the Amazon Bedrock User Guide.</p>
* </li>
* <li>
* <p>If you use an <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-customization-import-model.html">imported model</a>, specify the ARN of the imported model. You can get the model ARN from a successful call to <a href="https://docs.aws.amazon.com/bedrock/latest/APIReference/API_CreateModelImportJob.html">CreateModelImportJob</a> or from the Imported models page in the Amazon Bedrock console.</p>
* </li>
* </ul>
* @public
*/
foundationModel?: string;
Expand Down Expand Up @@ -1915,7 +1935,25 @@ export interface UpdateAgentRequest {
instruction?: string;

/**
* <p>Specifies a new foundation model to be used for orchestration by the agent.</p>
* <p>The identifier for the model that you want to be used for orchestration by the agent you create.</p>
* <p>The <code>modelId</code> to provide depends on the type of model or throughput that you use:</p>
* <ul>
* <li>
* <p>If you use a base model, specify the model ID or its ARN. For a list of model IDs for base models, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-ids.html#model-ids-arns">Amazon Bedrock base model IDs (on-demand throughput)</a> in the Amazon Bedrock User Guide.</p>
* </li>
* <li>
* <p>If you use an inference profile, specify the inference profile ID or its ARN. For a list of inference profile IDs, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/cross-region-inference-support.html">Supported Regions and models for cross-region inference</a> in the Amazon Bedrock User Guide.</p>
* </li>
* <li>
* <p>If you use a provisioned model, specify the ARN of the Provisioned Throughput. For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/prov-thru-use.html">Run inference using a Provisioned Throughput</a> in the Amazon Bedrock User Guide.</p>
* </li>
* <li>
* <p>If you use a custom model, first purchase Provisioned Throughput for it. Then specify the ARN of the resulting provisioned model. For more information, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-customization-use.html">Use a custom model in Amazon Bedrock</a> in the Amazon Bedrock User Guide.</p>
* </li>
* <li>
* <p>If you use an <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-customization-import-model.html">imported model</a>, specify the ARN of the imported model. You can get the model ARN from a successful call to <a href="https://docs.aws.amazon.com/bedrock/latest/APIReference/API_CreateModelImportJob.html">CreateModelImportJob</a> or from the Imported models page in the Amazon Bedrock console.</p>
* </li>
* </ul>
* @public
*/
foundationModel: string | undefined;
Expand Down Expand Up @@ -4136,6 +4174,12 @@ export interface PromptFlowNodeInlineConfiguration {
* @public
*/
inferenceConfiguration?: PromptInferenceConfiguration;

/**
* <p>Contains model-specific inference configurations that aren't in the <code>inferenceConfiguration</code> field. To see model-specific inference parameters, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters.html">Inference request parameters and response fields for foundation models</a>.</p>
* @public
*/
additionalModelRequestFields?: __DocumentType;
}

/**
Expand Down Expand Up @@ -7527,6 +7571,12 @@ export interface PromptVariant {
* @public
*/
metadata?: PromptMetadataEntry[];

/**
* <p>Contains model-specific inference configurations that aren't in the <code>inferenceConfiguration</code> field. To see model-specific inference parameters, see <a href="https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters.html">Inference request parameters and response fields for foundation models</a>.</p>
* @public
*/
additionalModelRequestFields?: __DocumentType;
}

/**
Expand Down Expand Up @@ -8576,7 +8626,7 @@ export const FlowConditionFilterSensitiveLog = (obj: FlowCondition): any => ({
*/
export const ConditionFlowNodeConfigurationFilterSensitiveLog = (obj: ConditionFlowNodeConfiguration): any => ({
...obj,
...(obj.conditions && { conditions: SENSITIVE_STRING }),
...(obj.conditions && { conditions: obj.conditions.map((item) => FlowConditionFilterSensitiveLog(item)) }),
});

/**
Expand Down Expand Up @@ -8668,7 +8718,7 @@ export const FlowNodeFilterSensitiveLog = (obj: FlowNode): any => ({
*/
export const FlowDefinitionFilterSensitiveLog = (obj: FlowDefinition): any => ({
...obj,
...(obj.nodes && { nodes: SENSITIVE_STRING }),
...(obj.nodes && { nodes: obj.nodes.map((item) => FlowNodeFilterSensitiveLog(item)) }),
...(obj.connections && { connections: obj.connections.map((item) => item) }),
});

Expand All @@ -8677,55 +8727,55 @@ export const FlowDefinitionFilterSensitiveLog = (obj: FlowDefinition): any => ({
*/
export const CreateFlowRequestFilterSensitiveLog = (obj: CreateFlowRequest): any => ({
...obj,
...(obj.definition && { definition: FlowDefinitionFilterSensitiveLog(obj.definition) }),
...(obj.definition && { definition: SENSITIVE_STRING }),
});

/**
* @internal
*/
export const CreateFlowResponseFilterSensitiveLog = (obj: CreateFlowResponse): any => ({
...obj,
...(obj.definition && { definition: FlowDefinitionFilterSensitiveLog(obj.definition) }),
...(obj.definition && { definition: SENSITIVE_STRING }),
});

/**
* @internal
*/
export const CreateFlowVersionResponseFilterSensitiveLog = (obj: CreateFlowVersionResponse): any => ({
...obj,
...(obj.definition && { definition: FlowDefinitionFilterSensitiveLog(obj.definition) }),
...(obj.definition && { definition: SENSITIVE_STRING }),
});

/**
* @internal
*/
export const GetFlowVersionResponseFilterSensitiveLog = (obj: GetFlowVersionResponse): any => ({
...obj,
...(obj.definition && { definition: FlowDefinitionFilterSensitiveLog(obj.definition) }),
...(obj.definition && { definition: SENSITIVE_STRING }),
});

/**
* @internal
*/
export const GetFlowResponseFilterSensitiveLog = (obj: GetFlowResponse): any => ({
...obj,
...(obj.definition && { definition: FlowDefinitionFilterSensitiveLog(obj.definition) }),
...(obj.definition && { definition: SENSITIVE_STRING }),
});

/**
* @internal
*/
export const UpdateFlowRequestFilterSensitiveLog = (obj: UpdateFlowRequest): any => ({
...obj,
...(obj.definition && { definition: FlowDefinitionFilterSensitiveLog(obj.definition) }),
...(obj.definition && { definition: SENSITIVE_STRING }),
});

/**
* @internal
*/
export const UpdateFlowResponseFilterSensitiveLog = (obj: UpdateFlowResponse): any => ({
...obj,
...(obj.definition && { definition: FlowDefinitionFilterSensitiveLog(obj.definition) }),
...(obj.definition && { definition: SENSITIVE_STRING }),
});

/**
Expand Down
19 changes: 19 additions & 0 deletions clients/client-bedrock-agent/src/protocols/Aws_restJson1.ts
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ import {
withBaseException,
} from "@smithy/smithy-client";
import {
DocumentType as __DocumentType,
Endpoint as __Endpoint,
ResponseMetadata as __ResponseMetadata,
SerdeContext as __SerdeContext,
Expand Down Expand Up @@ -3481,6 +3482,7 @@ const se_PromptFlowNodeInlineConfiguration = (
context: __SerdeContext
): any => {
return take(input, {
additionalModelRequestFields: (_) => se_Document(_, context),
inferenceConfiguration: (_) => se_PromptInferenceConfiguration(_, context),
modelId: [],
templateConfiguration: _json,
Expand Down Expand Up @@ -3554,6 +3556,7 @@ const se_PromptOverrideConfiguration = (input: PromptOverrideConfiguration, cont
*/
const se_PromptVariant = (input: PromptVariant, context: __SerdeContext): any => {
return take(input, {
additionalModelRequestFields: (_) => se_Document(_, context),
inferenceConfiguration: (_) => se_PromptInferenceConfiguration(_, context),
metadata: _json,
modelId: [],
Expand Down Expand Up @@ -3654,6 +3657,13 @@ const se_PromptVariantList = (input: PromptVariant[], context: __SerdeContext):

// se_WebSourceConfiguration omitted.

/**
 * serializeAws_restJson1Document
 *
 * Document shapes carry free-form JSON, so serialization for the
 * restJson1 protocol is a pass-through: the value is emitted on the wire
 * exactly as provided. The serde context parameter is accepted only to
 * match the shared serializer signature and is not used.
 */
const se_Document = (value: __DocumentType, _context: __SerdeContext): any => value;

// de_ActionGroupExecutor omitted.

/**
Expand Down Expand Up @@ -4423,6 +4433,7 @@ const de_PromptFlowNodeInlineConfiguration = (
context: __SerdeContext
): PromptFlowNodeInlineConfiguration => {
return take(output, {
additionalModelRequestFields: (_: any) => de_Document(_, context),
inferenceConfiguration: (_: any) => de_PromptInferenceConfiguration(__expectUnion(_), context),
modelId: __expectString,
templateConfiguration: (_: any) => _json(__expectUnion(_)),
Expand Down Expand Up @@ -4531,6 +4542,7 @@ const de_PromptSummary = (output: any, context: __SerdeContext): PromptSummary =
*/
const de_PromptVariant = (output: any, context: __SerdeContext): PromptVariant => {
return take(output, {
additionalModelRequestFields: (_: any) => de_Document(_, context),
inferenceConfiguration: (_: any) => de_PromptInferenceConfiguration(__expectUnion(_), context),
metadata: _json,
modelId: __expectString,
Expand Down Expand Up @@ -4638,6 +4650,13 @@ const de_PromptVariantList = (output: any, context: __SerdeContext): PromptVaria

// de_WebSourceConfiguration omitted.

/**
 * deserializeAws_restJson1Document
 *
 * Document shapes carry arbitrary JSON, so deserialization is a
 * pass-through: the parsed wire value is returned as-is. The serde
 * context parameter is unused and present only for signature uniformity
 * with the other generated deserializers.
 */
const de_Document = (payload: any, _context: __SerdeContext): __DocumentType => payload;

const deserializeMetadata = (output: __HttpResponse): __ResponseMetadata => ({
httpStatusCode: output.statusCode,
requestId:
Expand Down
Loading

0 comments on commit 5cf2e4b

Please sign in to comment.