diff --git a/clients/client-bedrock-agent/src/commands/CreateAgentCommand.ts b/clients/client-bedrock-agent/src/commands/CreateAgentCommand.ts
index e9c7a27e79b46..73049ea795973 100644
--- a/clients/client-bedrock-agent/src/commands/CreateAgentCommand.ts
+++ b/clients/client-bedrock-agent/src/commands/CreateAgentCommand.ts
@@ -59,7 +59,10 @@ export interface CreateAgentCommandOutput extends CreateAgentResponse, __Metadat
  * For more information, see Advanced prompts.
- * If you agent fails to be created, the response returns a list of failureReasons
- * alongside a list of recommendedActions
- * for you to troubleshoot.
+ * If your agent fails to be created, the response returns a list of failureReasons
+ * alongside a list of recommendedActions
+ * for you to troubleshoot.
+ * The agent instructions will not be honored if your agent has only one knowledge base, uses default prompts, has no action group, and user input is disabled.
- * You can get the ARN of a model with the action. Standard model usage
+ * You can get the ARN of a model with the ListFoundationModels action. Standard model usage
  * charges apply for the foundation model parsing strategy.
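Usage sketch (illustration only, not part of the applied diff): how a caller might surface the failureReasons and recommendedActions pair that the corrected doc comment above describes. The region, agent name, model ID, and role ARN are placeholder assumptions.

import { BedrockAgentClient, CreateAgentCommand } from "@aws-sdk/client-bedrock-agent";

const client = new BedrockAgentClient({ region: "us-east-1" });

async function createExampleAgent() {
  const { agent } = await client.send(
    new CreateAgentCommand({
      agentName: "example-agent", // placeholder
      foundationModel: "anthropic.claude-3-sonnet-20240229-v1:0", // placeholder model ID
      agentResourceRoleArn: "arn:aws:iam::123456789012:role/ExampleAgentRole", // placeholder
    })
  );

  // If creation fails, the response carries troubleshooting hints,
  // as the updated doc comment describes.
  console.log(agent?.failureReasons, agent?.recommendedActions);
}

createExampleAgent();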
  * @public
  */

diff --git a/clients/client-bedrock-agent/src/models/models_0.ts b/clients/client-bedrock-agent/src/models/models_0.ts
--- a/clients/client-bedrock-agent/src/models/models_0.ts
+++ b/clients/client-bedrock-agent/src/models/models_0.ts
@@ -7437,6 +7437,24 @@ export interface UpdateKnowledgeBaseResponse {
   knowledgeBase: KnowledgeBase | undefined;
 }

+/**
+ * Contains a key-value pair that defines a metadata tag and value to attach to a prompt variant. For more information, see Create a prompt using Prompt management.
+ * @public
+ */
+export interface PromptMetadataEntry {
+  /**
+   * The key of a metadata tag for a prompt variant.
+   * @public
+   */
+  key: string | undefined;
+
+  /**
+   * The value of a metadata tag for a prompt variant.
+   * @public
+   */
+  value: string | undefined;
+}
+
 /**
  * Contains details about a variant of the prompt.
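As a minimal sketch of the new shape (the key and value strings below are illustrative assumptions, not taken from the service docs):

const entry: PromptMetadataEntry = {
  key: "owner",         // metadata tag name for the variant
  value: "prompt-team", // tag value
};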
  * @public
@@ -7471,6 +7489,12 @@ export interface PromptVariant {
    * @public
    */
   inferenceConfiguration?: PromptInferenceConfiguration;
+
+  /**
+   * An array of objects, each containing a key-value pair that defines a metadata tag and value to attach to a prompt variant. For more information, see Create a prompt using Prompt management.
+   * @public
+   */
+  metadata?: PromptMetadataEntry[];
 }

 /**
@@ -8672,6 +8696,15 @@ export const UpdateFlowResponseFilterSensitiveLog = (obj: UpdateFlowResponse): a
   ...(obj.definition && { definition: FlowDefinitionFilterSensitiveLog(obj.definition) }),
 });

+/**
+ * @internal
+ */
+export const PromptMetadataEntryFilterSensitiveLog = (obj: PromptMetadataEntry): any => ({
+  ...obj,
+  ...(obj.key && { key: SENSITIVE_STRING }),
+  ...(obj.value && { value: SENSITIVE_STRING }),
+});
+
 /**
  * @internal
  */
@@ -8681,6 +8714,7 @@ export const PromptVariantFilterSensitiveLog = (obj: PromptVariant): any => ({
   ...(obj.templateConfiguration && {
     templateConfiguration: PromptTemplateConfigurationFilterSensitiveLog(obj.templateConfiguration),
   }),
   ...(obj.inferenceConfiguration && { inferenceConfiguration: obj.inferenceConfiguration }),
+  ...(obj.metadata && { metadata: SENSITIVE_STRING }),
 });

 /**
diff --git a/clients/client-bedrock-agent/src/protocols/Aws_restJson1.ts b/clients/client-bedrock-agent/src/protocols/Aws_restJson1.ts
index 1d3a4cf048b0a..169b45b19443c 100644
--- a/clients/client-bedrock-agent/src/protocols/Aws_restJson1.ts
+++ b/clients/client-bedrock-agent/src/protocols/Aws_restJson1.ts
@@ -226,6 +226,7 @@ import {
   PromptFlowNodeSourceConfiguration,
   PromptInferenceConfiguration,
   PromptInputVariable,
+  PromptMetadataEntry,
   PromptModelInferenceConfiguration,
   PromptOverrideConfiguration,
   PromptSummary,
@@ -3480,6 +3481,10 @@ const se_PromptInferenceConfiguration = (input: PromptInferenceConfiguration, co

 // se_PromptInputVariablesList omitted.

+// se_PromptMetadataEntry omitted.
+
+// se_PromptMetadataList omitted.
+
 /**
  * serializeAws_restJson1PromptModelInferenceConfiguration
  */
@@ -3514,6 +3519,7 @@ const se_PromptOverrideConfiguration = (input: PromptOverrideConfiguration, cont
 const se_PromptVariant = (input: PromptVariant, context: __SerdeContext): any => {
   return take(input, {
     inferenceConfiguration: (_) => se_PromptInferenceConfiguration(_, context),
+    metadata: _json,
     modelId: [],
     name: [],
     templateConfiguration: _json,
@@ -4426,6 +4432,10 @@ const de_PromptInferenceConfiguration = (output: any, context: __SerdeContext):

 // de_PromptInputVariablesList omitted.

+// de_PromptMetadataEntry omitted.
+
+// de_PromptMetadataList omitted.
+
 /**
  * deserializeAws_restJson1PromptModelInferenceConfiguration
  */
@@ -4487,6 +4497,7 @@ const de_PromptSummary = (output: any, context: __SerdeContext): PromptSummary =
 const de_PromptVariant = (output: any, context: __SerdeContext): PromptVariant => {
   return take(output, {
     inferenceConfiguration: (_: any) => de_PromptInferenceConfiguration(__expectUnion(_), context),
+    metadata: _json,
     modelId: __expectString,
     name: __expectString,
     templateConfiguration: (_: any) => _json(__expectUnion(_)),
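End-to-end sketch (illustration only, not part of the applied diff): passing the new metadata field on a prompt variant via CreatePromptCommand, which belongs to the same Prompt management surface in this client. The prompt name, region, model ID, template text, and metadata pair are placeholder assumptions.

import { BedrockAgentClient, CreatePromptCommand } from "@aws-sdk/client-bedrock-agent";

const client = new BedrockAgentClient({ region: "us-east-1" });

async function createPromptWithMetadata() {
  const response = await client.send(
    new CreatePromptCommand({
      name: "example-prompt", // placeholder
      variants: [
        {
          name: "variantOne",
          templateType: "TEXT",
          templateConfiguration: {
            text: {
              text: "Summarize the following: {{input}}",
              inputVariables: [{ name: "input" }],
            },
          },
          modelId: "anthropic.claude-3-sonnet-20240229-v1:0", // placeholder model ID
          // The new field from this change: serialized through as-is
          // (metadata: _json) and replaced with SENSITIVE_STRING by
          // PromptVariantFilterSensitiveLog when the variant is logged.
          metadata: [{ key: "owner", value: "prompt-team" }],
        },
      ],
    })
  );
  console.log(response.id, response.version);
}

createPromptWithMetadata();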