diff --git a/eng/MSBuild/Shared.props b/eng/MSBuild/Shared.props
index a68b0e4298f..dee583f7e39 100644
--- a/eng/MSBuild/Shared.props
+++ b/eng/MSBuild/Shared.props
@@ -14,6 +14,10 @@
+
+
+
+
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/JsonModelHelpers.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/JsonModelHelpers.cs
new file mode 100644
index 00000000000..5f6b92d2f01
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/JsonModelHelpers.cs
@@ -0,0 +1,31 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.ClientModel.Primitives;
+
+namespace Microsoft.Extensions.AI;
+
+/// <summary>
+/// Defines a set of helper methods for working with <see cref="IJsonModel{T}"/> types.
+/// </summary>
+internal static class JsonModelHelpers
+{
+ public static BinaryData Serialize<TModel>(TModel value)
+ where TModel : IJsonModel<TModel>
+ {
+ return value.Write(ModelReaderWriterOptions.Json);
+ }
+
+ public static TModel Deserialize<TModel>(BinaryData data)
+ where TModel : IJsonModel<TModel>, new()
+ {
+ return JsonModelDeserializationWitness<TModel>.Value.Create(data, ModelReaderWriterOptions.Json);
+ }
+
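+ // Caches a single TModel instance per closed generic type so its IJsonModel<TModel>
+ // implementation can be reused for deserializing subsequent payloads.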
+ private sealed class JsonModelDeserializationWitness<TModel>
+ where TModel : IJsonModel<TModel>, new()
+ {
+ public static readonly IJsonModel<TModel> Value = new TModel();
+ }
+}
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/Microsoft.Extensions.AI.OpenAI.csproj b/src/Libraries/Microsoft.Extensions.AI.OpenAI/Microsoft.Extensions.AI.OpenAI.csproj
index d3d09766d69..d3e969337e6 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/Microsoft.Extensions.AI.OpenAI.csproj
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/Microsoft.Extensions.AI.OpenAI.csproj
@@ -18,11 +18,15 @@
$(NoWarn);CA1063;CA1508;CA2227;SA1316;S1121;S3358;EA0002;OPENAI002
true
true
+ true
true
true
+ true
+ true
+ true
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs
index 09ad9aa18ac..d0ec35d1e22 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatClient.cs
@@ -4,11 +4,7 @@
using System;
using System.Collections.Generic;
using System.Reflection;
-using System.Runtime.CompilerServices;
-using System.Text;
using System.Text.Json;
-using System.Text.Json.Serialization;
-using System.Text.Json.Serialization.Metadata;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Shared.Diagnostics;
@@ -16,7 +12,6 @@
using OpenAI.Chat;
#pragma warning disable S1067 // Expressions should not be too complex
-#pragma warning disable S1135 // Track uses of "TODO" tags
#pragma warning disable S3011 // Reflection should not be used to increase accessibility of classes, methods, or fields
#pragma warning disable SA1204 // Static elements should appear before instance elements
#pragma warning disable SA1108 // Block statements should not contain embedded comments
@@ -26,8 +21,6 @@ namespace Microsoft.Extensions.AI;
/// <summary>Represents an <see cref="IChatClient"/> for an OpenAI <see cref="OpenAIClient"/> or <see cref="ChatClient"/>.</summary>
public sealed class OpenAIChatClient : IChatClient
{
- private static readonly JsonElement _defaultParameterSchema = JsonDocument.Parse("{}").RootElement;
-
/// Default OpenAI endpoint.
private static readonly Uri _defaultOpenAIEndpoint = new("https://api.openai.com/v1");
@@ -110,224 +103,28 @@ public async Task<ChatCompletion> CompleteAsync(
{
_ = Throw.IfNull(chatMessages);
- // Make the call to OpenAI.
- OpenAI.Chat.ChatCompletion response = (await _chatClient.CompleteChatAsync(
- ToOpenAIChatMessages(chatMessages),
- ToOpenAIOptions(options),
- cancellationToken).ConfigureAwait(false)).Value;
-
- // Create the return message.
- ChatMessage returnMessage = new()
- {
- RawRepresentation = response,
- Role = ToChatRole(response.Role),
- };
-
- // Populate its content from those in the OpenAI response content.
- foreach (ChatMessageContentPart contentPart in response.Content)
- {
- if (ToAIContent(contentPart) is AIContent aiContent)
- {
- returnMessage.Contents.Add(aiContent);
- }
- }
-
- // Also manufacture function calling content items from any tool calls in the response.
- if (options?.Tools is { Count: > 0 })
- {
- foreach (ChatToolCall toolCall in response.ToolCalls)
- {
- if (!string.IsNullOrWhiteSpace(toolCall.FunctionName))
- {
- var callContent = ParseCallContentFromBinaryData(toolCall.FunctionArguments, toolCall.Id, toolCall.FunctionName);
- callContent.RawRepresentation = toolCall;
-
- returnMessage.Contents.Add(callContent);
- }
- }
- }
-
- // Wrap the content in a ChatCompletion to return.
- var completion = new ChatCompletion([returnMessage])
- {
- RawRepresentation = response,
- CompletionId = response.Id,
- CreatedAt = response.CreatedAt,
- ModelId = response.Model,
- FinishReason = ToFinishReason(response.FinishReason),
- };
+ var openAIChatMessages = OpenAIModelMappers.ToOpenAIChatMessages(chatMessages, ToolCallJsonSerializerOptions);
+ var openAIOptions = OpenAIModelMappers.ToOpenAIOptions(options);
- if (response.Usage is ChatTokenUsage tokenUsage)
- {
- completion.Usage = ToUsageDetails(tokenUsage);
- }
-
- if (response.ContentTokenLogProbabilities is { Count: > 0 } contentTokenLogProbs)
- {
- (completion.AdditionalProperties ??= [])[nameof(response.ContentTokenLogProbabilities)] = contentTokenLogProbs;
- }
-
- if (response.Refusal is string refusal)
- {
- (completion.AdditionalProperties ??= [])[nameof(response.Refusal)] = refusal;
- }
-
- if (response.RefusalTokenLogProbabilities is { Count: > 0 } refusalTokenLogProbs)
- {
- (completion.AdditionalProperties ??= [])[nameof(response.RefusalTokenLogProbabilities)] = refusalTokenLogProbs;
- }
-
- if (response.SystemFingerprint is string systemFingerprint)
- {
- (completion.AdditionalProperties ??= [])[nameof(response.SystemFingerprint)] = systemFingerprint;
- }
+ // Make the call to OpenAI.
+ var response = await _chatClient.CompleteChatAsync(openAIChatMessages, openAIOptions, cancellationToken).ConfigureAwait(false);
- return completion;
+ return OpenAIModelMappers.FromOpenAIChatCompletion(response.Value, options);
}
/// <inheritdoc />
- public async IAsyncEnumerable<StreamingChatCompletionUpdate> CompleteStreamingAsync(
- IList<ChatMessage> chatMessages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
+ public IAsyncEnumerable<StreamingChatCompletionUpdate> CompleteStreamingAsync(
+ IList<ChatMessage> chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default)
{
_ = Throw.IfNull(chatMessages);
- Dictionary<int, FunctionCallInfo>? functionCallInfos = null;
- ChatRole? streamedRole = null;
- ChatFinishReason? finishReason = null;
- StringBuilder? refusal = null;
- string? completionId = null;
- DateTimeOffset? createdAt = null;
- string? modelId = null;
- string? fingerprint = null;
-
- // Process each update as it arrives
- await foreach (OpenAI.Chat.StreamingChatCompletionUpdate chatCompletionUpdate in _chatClient.CompleteChatStreamingAsync(
- ToOpenAIChatMessages(chatMessages), ToOpenAIOptions(options), cancellationToken).ConfigureAwait(false))
- {
- // The role and finish reason may arrive during any update, but once they've arrived, the same value should be the same for all subsequent updates.
- streamedRole ??= chatCompletionUpdate.Role is ChatMessageRole role ? ToChatRole(role) : null;
- finishReason ??= chatCompletionUpdate.FinishReason is OpenAI.Chat.ChatFinishReason reason ? ToFinishReason(reason) : null;
- completionId ??= chatCompletionUpdate.CompletionId;
- createdAt ??= chatCompletionUpdate.CreatedAt;
- modelId ??= chatCompletionUpdate.Model;
- fingerprint ??= chatCompletionUpdate.SystemFingerprint;
-
- // Create the response content object.
- StreamingChatCompletionUpdate completionUpdate = new()
- {
- CompletionId = chatCompletionUpdate.CompletionId,
- CreatedAt = chatCompletionUpdate.CreatedAt,
- FinishReason = finishReason,
- ModelId = modelId,
- RawRepresentation = chatCompletionUpdate,
- Role = streamedRole,
- };
-
- // Populate it with any additional metadata from the OpenAI object.
- if (chatCompletionUpdate.ContentTokenLogProbabilities is { Count: > 0 } contentTokenLogProbs)
- {
- (completionUpdate.AdditionalProperties ??= [])[nameof(chatCompletionUpdate.ContentTokenLogProbabilities)] = contentTokenLogProbs;
- }
-
- if (chatCompletionUpdate.RefusalTokenLogProbabilities is { Count: > 0 } refusalTokenLogProbs)
- {
- (completionUpdate.AdditionalProperties ??= [])[nameof(chatCompletionUpdate.RefusalTokenLogProbabilities)] = refusalTokenLogProbs;
- }
-
- if (fingerprint is not null)
- {
- (completionUpdate.AdditionalProperties ??= [])[nameof(chatCompletionUpdate.SystemFingerprint)] = fingerprint;
- }
-
- // Transfer over content update items.
- if (chatCompletionUpdate.ContentUpdate is { Count: > 0 })
- {
- foreach (ChatMessageContentPart contentPart in chatCompletionUpdate.ContentUpdate)
- {
- if (ToAIContent(contentPart) is AIContent aiContent)
- {
- completionUpdate.Contents.Add(aiContent);
- }
- }
- }
-
- // Transfer over refusal updates.
- if (chatCompletionUpdate.RefusalUpdate is not null)
- {
- _ = (refusal ??= new()).Append(chatCompletionUpdate.RefusalUpdate);
- }
-
- // Transfer over tool call updates.
- if (chatCompletionUpdate.ToolCallUpdates is { Count: > 0 } toolCallUpdates)
- {
- foreach (StreamingChatToolCallUpdate toolCallUpdate in toolCallUpdates)
- {
- functionCallInfos ??= [];
- if (!functionCallInfos.TryGetValue(toolCallUpdate.Index, out FunctionCallInfo? existing))
- {
- functionCallInfos[toolCallUpdate.Index] = existing = new();
- }
-
- existing.CallId ??= toolCallUpdate.ToolCallId;
- existing.Name ??= toolCallUpdate.FunctionName;
- if (toolCallUpdate.FunctionArgumentsUpdate is { } update && !update.ToMemory().IsEmpty)
- {
- _ = (existing.Arguments ??= new()).Append(update.ToString());
- }
- }
- }
-
- // Transfer over usage updates.
- if (chatCompletionUpdate.Usage is ChatTokenUsage tokenUsage)
- {
- var usageDetails = ToUsageDetails(tokenUsage);
- completionUpdate.Contents.Add(new UsageContent(usageDetails));
- }
-
- // Now yield the item.
- yield return completionUpdate;
- }
-
- // Now that we've received all updates, combine any for function calls into a single item to yield.
- if (functionCallInfos is not null)
- {
- StreamingChatCompletionUpdate completionUpdate = new()
- {
- CompletionId = completionId,
- CreatedAt = createdAt,
- FinishReason = finishReason,
- ModelId = modelId,
- Role = streamedRole,
- };
+ var openAIChatMessages = OpenAIModelMappers.ToOpenAIChatMessages(chatMessages, ToolCallJsonSerializerOptions);
+ var openAIOptions = OpenAIModelMappers.ToOpenAIOptions(options);
- foreach (var entry in functionCallInfos)
- {
- FunctionCallInfo fci = entry.Value;
- if (!string.IsNullOrWhiteSpace(fci.Name))
- {
- var callContent = ParseCallContentFromJsonString(
- fci.Arguments?.ToString() ?? string.Empty,
- fci.CallId!,
- fci.Name!);
- completionUpdate.Contents.Add(callContent);
- }
- }
-
- // Refusals are about the model not following the schema for tool calls. As such, if we have any refusal,
- // add it to this function calling item.
- if (refusal is not null)
- {
- (completionUpdate.AdditionalProperties ??= [])[nameof(ChatMessageContentPart.Refusal)] = refusal.ToString();
- }
-
- // Propagate additional relevant metadata.
- if (fingerprint is not null)
- {
- (completionUpdate.AdditionalProperties ??= [])[nameof(OpenAI.Chat.ChatCompletion.SystemFingerprint)] = fingerprint;
- }
+ // Make the call to OpenAI.
+ var chatCompletionUpdates = _chatClient.CompleteChatStreamingAsync(openAIChatMessages, openAIOptions, cancellationToken);
- yield return completionUpdate;
- }
+ return OpenAIModelMappers.FromOpenAIStreamingChatCompletionAsync(chatCompletionUpdates, cancellationToken);
}
/// <inheritdoc />
@@ -335,376 +132,4 @@ void IDisposable.Dispose()
{
// Nothing to dispose. Implementation required for the IChatClient interface.
}
-
- /// POCO representing function calling info. Used to concatenate information for a single function call from across multiple streaming updates.
- private sealed class FunctionCallInfo
- {
- public string? CallId;
- public string? Name;
- public StringBuilder? Arguments;
- }
-
- private static UsageDetails ToUsageDetails(ChatTokenUsage tokenUsage)
- {
- var destination = new UsageDetails
- {
- InputTokenCount = tokenUsage.InputTokenCount,
- OutputTokenCount = tokenUsage.OutputTokenCount,
- TotalTokenCount = tokenUsage.TotalTokenCount,
- AdditionalCounts = new(),
- };
-
- if (tokenUsage.InputTokenDetails is ChatInputTokenUsageDetails inputDetails)
- {
- destination.AdditionalCounts.Add(
- $"{nameof(ChatTokenUsage.InputTokenDetails)}.{nameof(ChatInputTokenUsageDetails.AudioTokenCount)}",
- inputDetails.AudioTokenCount);
-
- destination.AdditionalCounts.Add(
- $"{nameof(ChatTokenUsage.InputTokenDetails)}.{nameof(ChatInputTokenUsageDetails.CachedTokenCount)}",
- inputDetails.CachedTokenCount);
- }
-
- if (tokenUsage.OutputTokenDetails is ChatOutputTokenUsageDetails outputDetails)
- {
- destination.AdditionalCounts.Add(
- $"{nameof(ChatTokenUsage.OutputTokenDetails)}.{nameof(ChatOutputTokenUsageDetails.AudioTokenCount)}",
- outputDetails.AudioTokenCount);
-
- destination.AdditionalCounts.Add(
- $"{nameof(ChatTokenUsage.OutputTokenDetails)}.{nameof(ChatOutputTokenUsageDetails.ReasoningTokenCount)}",
- outputDetails.ReasoningTokenCount);
- }
-
- return destination;
- }
-
- /// Converts an OpenAI role to an Extensions role.
- private static ChatRole ToChatRole(ChatMessageRole role) =>
- role switch
- {
- ChatMessageRole.System => ChatRole.System,
- ChatMessageRole.User => ChatRole.User,
- ChatMessageRole.Assistant => ChatRole.Assistant,
- ChatMessageRole.Tool => ChatRole.Tool,
- _ => new ChatRole(role.ToString()),
- };
-
- /// Converts an OpenAI finish reason to an Extensions finish reason.
- private static ChatFinishReason? ToFinishReason(OpenAI.Chat.ChatFinishReason? finishReason) =>
- finishReason?.ToString() is not string s ? null :
- finishReason switch
- {
- OpenAI.Chat.ChatFinishReason.Stop => ChatFinishReason.Stop,
- OpenAI.Chat.ChatFinishReason.Length => ChatFinishReason.Length,
- OpenAI.Chat.ChatFinishReason.ContentFilter => ChatFinishReason.ContentFilter,
- OpenAI.Chat.ChatFinishReason.ToolCalls or OpenAI.Chat.ChatFinishReason.FunctionCall => ChatFinishReason.ToolCalls,
- _ => new ChatFinishReason(s),
- };
-
- /// Converts an extensions options instance to an OpenAI options instance.
- private static ChatCompletionOptions ToOpenAIOptions(ChatOptions? options)
- {
- ChatCompletionOptions result = new();
-
- if (options is not null)
- {
- result.FrequencyPenalty = options.FrequencyPenalty;
- result.MaxOutputTokenCount = options.MaxOutputTokens;
- result.TopP = options.TopP;
- result.PresencePenalty = options.PresencePenalty;
- result.Temperature = options.Temperature;
-#pragma warning disable OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates.
- result.Seed = options.Seed;
-#pragma warning restore OPENAI001
-
- if (options.StopSequences is { Count: > 0 } stopSequences)
- {
- foreach (string stopSequence in stopSequences)
- {
- result.StopSequences.Add(stopSequence);
- }
- }
-
- if (options.AdditionalProperties is { Count: > 0 } additionalProperties)
- {
- if (additionalProperties.TryGetValue(nameof(result.EndUserId), out string? endUserId))
- {
- result.EndUserId = endUserId;
- }
-
- if (additionalProperties.TryGetValue(nameof(result.IncludeLogProbabilities), out bool includeLogProbabilities))
- {
- result.IncludeLogProbabilities = includeLogProbabilities;
- }
-
- if (additionalProperties.TryGetValue(nameof(result.LogitBiases), out IDictionary<int, int>? logitBiases))
- {
- foreach (KeyValuePair<int, int> kvp in logitBiases!)
- {
- result.LogitBiases[kvp.Key] = kvp.Value;
- }
- }
-
- if (additionalProperties.TryGetValue(nameof(result.AllowParallelToolCalls), out bool allowParallelToolCalls))
- {
- result.AllowParallelToolCalls = allowParallelToolCalls;
- }
-
- if (additionalProperties.TryGetValue(nameof(result.TopLogProbabilityCount), out int topLogProbabilityCountInt))
- {
- result.TopLogProbabilityCount = topLogProbabilityCountInt;
- }
-
- if (additionalProperties.TryGetValue(nameof(result.Metadata), out IDictionary<string, string>? metadata))
- {
- foreach (KeyValuePair<string, string> kvp in metadata)
- {
- result.Metadata[kvp.Key] = kvp.Value;
- }
- }
-
- if (additionalProperties.TryGetValue(nameof(result.StoredOutputEnabled), out bool storeOutputEnabled))
- {
- result.StoredOutputEnabled = storeOutputEnabled;
- }
- }
-
- if (options.Tools is { Count: > 0 } tools)
- {
- foreach (AITool tool in tools)
- {
- if (tool is AIFunction af)
- {
- result.Tools.Add(ToOpenAIChatTool(af));
- }
- }
-
- switch (options.ToolMode)
- {
- case AutoChatToolMode:
- result.ToolChoice = ChatToolChoice.CreateAutoChoice();
- break;
-
- case RequiredChatToolMode required:
- result.ToolChoice = required.RequiredFunctionName is null ?
- ChatToolChoice.CreateRequiredChoice() :
- ChatToolChoice.CreateFunctionChoice(required.RequiredFunctionName);
- break;
- }
- }
-
- if (options.ResponseFormat is ChatResponseFormatText)
- {
- result.ResponseFormat = OpenAI.Chat.ChatResponseFormat.CreateTextFormat();
- }
- else if (options.ResponseFormat is ChatResponseFormatJson jsonFormat)
- {
- result.ResponseFormat = jsonFormat.Schema is { } jsonSchema ?
- OpenAI.Chat.ChatResponseFormat.CreateJsonSchemaFormat(
- jsonFormat.SchemaName ?? "json_schema",
- BinaryData.FromBytes(
- JsonSerializer.SerializeToUtf8Bytes(jsonSchema, OpenAIJsonContext.Default.JsonElement)),
- jsonFormat.SchemaDescription) :
- OpenAI.Chat.ChatResponseFormat.CreateJsonObjectFormat();
- }
- }
-
- return result;
- }
-
- /// Converts an Extensions function to an OpenAI chat tool.
- private static ChatTool ToOpenAIChatTool(AIFunction aiFunction)
- {
- bool? strict =
- aiFunction.Metadata.AdditionalProperties.TryGetValue("Strict", out object? strictObj) &&
- strictObj is bool strictValue ?
- strictValue : null;
-
- BinaryData resultParameters = OpenAIChatToolJson.ZeroFunctionParametersSchema;
-
- var parameters = aiFunction.Metadata.Parameters;
- if (parameters is { Count: > 0 })
- {
- OpenAIChatToolJson tool = new();
-
- foreach (AIFunctionParameterMetadata parameter in parameters)
- {
- tool.Properties.Add(parameter.Name, parameter.Schema is JsonElement e ? e : _defaultParameterSchema);
-
- if (parameter.IsRequired)
- {
- tool.Required.Add(parameter.Name);
- }
- }
-
- resultParameters = BinaryData.FromBytes(
- JsonSerializer.SerializeToUtf8Bytes(tool, OpenAIJsonContext.Default.OpenAIChatToolJson));
- }
-
- return ChatTool.CreateFunctionTool(aiFunction.Metadata.Name, aiFunction.Metadata.Description, resultParameters, strict);
- }
-
- /// Used to create the JSON payload for an OpenAI chat tool description.
- internal sealed class OpenAIChatToolJson
- {
- /// Gets a singleton JSON data for empty parameters. Optimization for the reasonably common case of a parameterless function.
- public static BinaryData ZeroFunctionParametersSchema { get; } = new("""{"type":"object","required":[],"properties":{}}"""u8.ToArray());
-
- [JsonPropertyName("type")]
- public string Type { get; set; } = "object";
-
- [JsonPropertyName("required")]
- public List<string> Required { get; set; } = [];
-
- [JsonPropertyName("properties")]
- public Dictionary<string, JsonElement> Properties { get; set; } = [];
- }
-
- /// <summary>Creates an <see cref="AIContent"/> from a <see cref="ChatMessageContentPart"/>.</summary>
- /// <param name="contentPart">The content part to convert into a content.</param>
- /// <returns>The constructed <see cref="AIContent"/>, or null if the content part could not be converted.</returns>
- private static AIContent? ToAIContent(ChatMessageContentPart contentPart)
- {
- AIContent? aiContent = null;
-
- if (contentPart.Kind == ChatMessageContentPartKind.Text)
- {
- aiContent = new TextContent(contentPart.Text);
- }
- else if (contentPart.Kind == ChatMessageContentPartKind.Image)
- {
- ImageContent? imageContent;
- aiContent = imageContent =
- contentPart.ImageUri is not null ? new ImageContent(contentPart.ImageUri, contentPart.ImageBytesMediaType) :
- contentPart.ImageBytes is not null ? new ImageContent(contentPart.ImageBytes.ToMemory(), contentPart.ImageBytesMediaType) :
- null;
-
- if (imageContent is not null && contentPart.ImageDetailLevel?.ToString() is string detail)
- {
- (imageContent.AdditionalProperties ??= [])[nameof(contentPart.ImageDetailLevel)] = detail;
- }
- }
-
- if (aiContent is not null)
- {
- if (contentPart.Refusal is string refusal)
- {
- (aiContent.AdditionalProperties ??= [])[nameof(contentPart.Refusal)] = refusal;
- }
-
- aiContent.RawRepresentation = contentPart;
- }
-
- return aiContent;
- }
-
- /// Converts an Extensions chat message enumerable to an OpenAI chat message enumerable.
- private IEnumerable<OpenAI.Chat.ChatMessage> ToOpenAIChatMessages(IEnumerable<ChatMessage> inputs)
- {
- // Maps all of the M.E.AI types to the corresponding OpenAI types.
- // Unrecognized or non-processable content is ignored.
-
- foreach (ChatMessage input in inputs)
- {
- if (input.Role == ChatRole.System || input.Role == ChatRole.User)
- {
- var parts = GetContentParts(input.Contents);
- yield return input.Role == ChatRole.System ?
- new SystemChatMessage(parts) { ParticipantName = input.AuthorName } :
- new UserChatMessage(parts) { ParticipantName = input.AuthorName };
- }
- else if (input.Role == ChatRole.Tool)
- {
- foreach (AIContent item in input.Contents)
- {
- if (item is FunctionResultContent resultContent)
- {
- string? result = resultContent.Result as string;
- if (result is null && resultContent.Result is not null)
- {
- try
- {
- result = JsonSerializer.Serialize(resultContent.Result, ToolCallJsonSerializerOptions.GetTypeInfo(typeof(object)));
- }
- catch (NotSupportedException)
- {
- // If the type can't be serialized, skip it.
- }
- }
-
- yield return new ToolChatMessage(resultContent.CallId, result ?? string.Empty);
- }
- }
- }
- else if (input.Role == ChatRole.Assistant)
- {
- AssistantChatMessage message = new(GetContentParts(input.Contents))
- {
- ParticipantName = input.AuthorName
- };
-
- foreach (var content in input.Contents)
- {
- if (content is FunctionCallContent { CallId: not null } callRequest)
- {
- message.ToolCalls.Add(
- ChatToolCall.CreateFunctionToolCall(
- callRequest.CallId,
- callRequest.Name,
- new(JsonSerializer.SerializeToUtf8Bytes(
- callRequest.Arguments,
- ToolCallJsonSerializerOptions.GetTypeInfo(typeof(IDictionary<string, object?>))))));
- }
- }
-
- if (input.AdditionalProperties?.TryGetValue(nameof(message.Refusal), out string? refusal) is true)
- {
- message.Refusal = refusal;
- }
-
- yield return message;
- }
- }
- }
-
- /// <summary>Converts a list of <see cref="AIContent"/> to a list of <see cref="ChatMessageContentPart"/>.</summary>
- private static List<ChatMessageContentPart> GetContentParts(IList<AIContent> contents)
- {
- List parts = [];
- foreach (var content in contents)
- {
- switch (content)
- {
- case TextContent textContent:
- parts.Add(ChatMessageContentPart.CreateTextPart(textContent.Text));
- break;
-
- case ImageContent imageContent when imageContent.Data is { IsEmpty: false } data:
- parts.Add(ChatMessageContentPart.CreateImagePart(BinaryData.FromBytes(data), imageContent.MediaType));
- break;
-
- case ImageContent imageContent when imageContent.Uri is string uri:
- parts.Add(ChatMessageContentPart.CreateImagePart(new Uri(uri)));
- break;
- }
- }
-
- if (parts.Count == 0)
- {
- parts.Add(ChatMessageContentPart.CreateTextPart(string.Empty));
- }
-
- return parts;
- }
-
- private static FunctionCallContent ParseCallContentFromJsonString(string json, string callId, string name) =>
- FunctionCallContent.CreateFromParsedArguments(json, callId, name,
- argumentParser: static json => JsonSerializer.Deserialize(json,
- (JsonTypeInfo<IDictionary<string, object?>>)AIJsonUtilities.DefaultOptions.GetTypeInfo(typeof(IDictionary<string, object?>)))!);
-
- private static FunctionCallContent ParseCallContentFromBinaryData(BinaryData ut8Json, string callId, string name) =>
- FunctionCallContent.CreateFromParsedArguments(ut8Json, callId, name,
- argumentParser: static json => JsonSerializer.Deserialize(json,
- (JsonTypeInfo<IDictionary<string, object?>>)AIJsonUtilities.DefaultOptions.GetTypeInfo(typeof(IDictionary<string, object?>)))!);
}
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatCompletionRequest.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatCompletionRequest.cs
new file mode 100644
index 00000000000..dba0e5ecbf8
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIChatCompletionRequest.cs
@@ -0,0 +1,32 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Collections.Generic;
+
+namespace Microsoft.Extensions.AI;
+
+/// <summary>
+/// Represents an OpenAI chat completion request deserialized as Microsoft.Extensions.AI models.
+/// </summary>
+public sealed class OpenAIChatCompletionRequest
+{
+ /// <summary>
+ /// Gets the chat messages specified in the completion request.
+ /// </summary>
+ public required IList<ChatMessage> Messages { get; init; }
+
+ /// <summary>
+ /// Gets the chat options governing the completion request.
+ /// </summary>
+ public required ChatOptions Options { get; init; }
+
+ /// <summary>
+ /// Gets a value indicating whether the completion response should be streamed.
+ /// </summary>
+ public bool Stream { get; init; }
+
+ /// <summary>
+ /// Gets the model id requested by the chat completion.
+ /// </summary>
+ public string? ModelId { get; init; }
+}
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIJsonContext.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIJsonContext.cs
index 9cd075e1d04..69f610b4818 100644
--- a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIJsonContext.cs
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIJsonContext.cs
@@ -1,6 +1,7 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
+using System.Collections.Generic;
using System.Text.Json;
using System.Text.Json.Serialization;
@@ -11,6 +12,7 @@ namespace Microsoft.Extensions.AI;
UseStringEnumConverter = true,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
WriteIndented = true)]
-[JsonSerializable(typeof(OpenAIChatClient.OpenAIChatToolJson))]
[JsonSerializable(typeof(OpenAIRealtimeExtensions.ConversationFunctionToolParametersSchema))]
+[JsonSerializable(typeof(OpenAIModelMappers.OpenAIChatToolJson))]
+[JsonSerializable(typeof(IDictionary<string, object?>))]
internal sealed partial class OpenAIJsonContext : JsonSerializerContext;
diff --git a/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatCompletion.cs b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatCompletion.cs
new file mode 100644
index 00000000000..9f35727cf80
--- /dev/null
+++ b/src/Libraries/Microsoft.Extensions.AI.OpenAI/OpenAIModelMapper.ChatCompletion.cs
@@ -0,0 +1,610 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
+using System.Text;
+using System.Text.Json;
+using System.Text.Json.Serialization;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.Shared.Diagnostics;
+using OpenAI.Chat;
+
+#pragma warning disable SA1204 // Static elements should appear before instance elements
+#pragma warning disable S103 // Lines should not be too long
+#pragma warning disable CA1859 // Use concrete types when possible for improved performance
+#pragma warning disable S1067 // Expressions should not be too complex
+
+namespace Microsoft.Extensions.AI;
+
+internal static partial class OpenAIModelMappers
+{
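+ // Empty JSON schema ("{}") used as a fallback when a parameter or return value has no schema of its own.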
+ private static readonly JsonElement _defaultParameterSchema = JsonDocument.Parse("{}").RootElement;
+
+ public static OpenAI.Chat.ChatCompletion ToOpenAIChatCompletion(ChatCompletion chatCompletion, JsonSerializerOptions options)
+ {
+ _ = Throw.IfNull(chatCompletion);
+
+ if (chatCompletion.Choices.Count > 1)
+ {
+ throw new NotSupportedException("Creating OpenAI ChatCompletion models with multiple choices is currently not supported.");
+ }
+
+ List<ChatToolCall>? toolCalls = null;
+ foreach (AIContent content in chatCompletion.Message.Contents)
+ {
+ if (content is FunctionCallContent callRequest)
+ {
+ toolCalls ??= [];
+ toolCalls.Add(ChatToolCall.CreateFunctionToolCall(
+ callRequest.CallId,
+ callRequest.Name,
+ new(JsonSerializer.SerializeToUtf8Bytes(
+ callRequest.Arguments,
+ options.GetTypeInfo(typeof(IDictionary<string, object?>))))));
+ }
+ }
+
+ OpenAI.Chat.ChatTokenUsage? chatTokenUsage = null;
+ if (chatCompletion.Usage is UsageDetails usageDetails)
+ {
+ chatTokenUsage = ToOpenAIUsage(usageDetails);
+ }
+
+ return OpenAIChatModelFactory.ChatCompletion(
+ id: chatCompletion.CompletionId,
+ model: chatCompletion.ModelId,
+ createdAt: chatCompletion.CreatedAt ?? default,
+ role: ToOpenAIChatRole(chatCompletion.Message.Role).Value,
+ finishReason: ToOpenAIFinishReason(chatCompletion.FinishReason),
+ content: new(ToOpenAIChatContent(chatCompletion.Message.Contents)),
+ toolCalls: toolCalls,
+ refusal: chatCompletion.AdditionalProperties.GetValueOrDefault<string>(nameof(OpenAI.Chat.ChatCompletion.Refusal)),
+ contentTokenLogProbabilities: chatCompletion.AdditionalProperties.GetValueOrDefault<IReadOnlyList<ChatTokenLogProbabilityDetails>>(nameof(OpenAI.Chat.ChatCompletion.ContentTokenLogProbabilities)),
+ refusalTokenLogProbabilities: chatCompletion.AdditionalProperties.GetValueOrDefault<IReadOnlyList<ChatTokenLogProbabilityDetails>>(nameof(OpenAI.Chat.ChatCompletion.RefusalTokenLogProbabilities)),
+ systemFingerprint: chatCompletion.AdditionalProperties.GetValueOrDefault<string>(nameof(OpenAI.Chat.ChatCompletion.SystemFingerprint)),
+ usage: chatTokenUsage);
+ }
+
+ public static ChatCompletion FromOpenAIChatCompletion(OpenAI.Chat.ChatCompletion openAICompletion, ChatOptions? options)
+ {
+ _ = Throw.IfNull(openAICompletion);
+
+ // Create the return message.
+ ChatMessage returnMessage = new()
+ {
+ RawRepresentation = openAICompletion,
+ Role = FromOpenAIChatRole(openAICompletion.Role),
+ };
+
+ // Populate its content from those in the OpenAI response content.
+ foreach (ChatMessageContentPart contentPart in openAICompletion.Content)
+ {
+ if (ToAIContent(contentPart) is AIContent aiContent)
+ {
+ returnMessage.Contents.Add(aiContent);
+ }
+ }
+
+ // Also manufacture function calling content items from any tool calls in the response.
+ if (options?.Tools is { Count: > 0 })
+ {
+ foreach (ChatToolCall toolCall in openAICompletion.ToolCalls)
+ {
+ if (!string.IsNullOrWhiteSpace(toolCall.FunctionName))
+ {
+ var callContent = ParseCallContentFromBinaryData(toolCall.FunctionArguments, toolCall.Id, toolCall.FunctionName);
+ callContent.RawRepresentation = toolCall;
+
+ returnMessage.Contents.Add(callContent);
+ }
+ }
+ }
+
+ // Wrap the content in a ChatCompletion to return.
+ var completion = new ChatCompletion([returnMessage])
+ {
+ RawRepresentation = openAICompletion,
+ CompletionId = openAICompletion.Id,
+ CreatedAt = openAICompletion.CreatedAt,
+ ModelId = openAICompletion.Model,
+ FinishReason = FromOpenAIFinishReason(openAICompletion.FinishReason),
+ };
+
+ if (openAICompletion.Usage is ChatTokenUsage tokenUsage)
+ {
+ completion.Usage = FromOpenAIUsage(tokenUsage);
+ }
+
+ if (openAICompletion.ContentTokenLogProbabilities is { Count: > 0 } contentTokenLogProbs)
+ {
+ (completion.AdditionalProperties ??= [])[nameof(openAICompletion.ContentTokenLogProbabilities)] = contentTokenLogProbs;
+ }
+
+ if (openAICompletion.Refusal is string refusal)
+ {
+ (completion.AdditionalProperties ??= [])[nameof(openAICompletion.Refusal)] = refusal;
+ }
+
+ if (openAICompletion.RefusalTokenLogProbabilities is { Count: > 0 } refusalTokenLogProbs)
+ {
+ (completion.AdditionalProperties ??= [])[nameof(openAICompletion.RefusalTokenLogProbabilities)] = refusalTokenLogProbs;
+ }
+
+ if (openAICompletion.SystemFingerprint is string systemFingerprint)
+ {
+ (completion.AdditionalProperties ??= [])[nameof(openAICompletion.SystemFingerprint)] = systemFingerprint;
+ }
+
+ return completion;
+ }
+
+ public static ChatOptions FromOpenAIOptions(OpenAI.Chat.ChatCompletionOptions? options)
+ {
+ ChatOptions result = new();
+
+ if (options is not null)
+ {
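+ // Note: the model id isn't publicly exposed on ChatCompletionOptions, so it is read here via the reflection-based _getModelIdAccessor.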
+ result.ModelId = _getModelIdAccessor.Invoke(options, null)?.ToString();
+ result.FrequencyPenalty = options.FrequencyPenalty;
+ result.MaxOutputTokens = options.MaxOutputTokenCount;
+ result.TopP = options.TopP;
+ result.PresencePenalty = options.PresencePenalty;
+ result.Temperature = options.Temperature;
+#pragma warning disable OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates.
+ result.Seed = options.Seed;
+#pragma warning restore OPENAI001
+
+ if (options.StopSequences is { Count: > 0 } stopSequences)
+ {
+ result.StopSequences = [.. stopSequences];
+ }
+
+ if (options.EndUserId is string endUserId)
+ {
+ (result.AdditionalProperties ??= [])[nameof(options.EndUserId)] = endUserId;
+ }
+
+ if (options.IncludeLogProbabilities is bool includeLogProbabilities)
+ {
+ (result.AdditionalProperties ??= [])[nameof(options.IncludeLogProbabilities)] = includeLogProbabilities;
+ }
+
+ if (options.LogitBiases is { Count: > 0 } logitBiases)
+ {
+ (result.AdditionalProperties ??= [])[nameof(options.LogitBiases)] = new Dictionary<int, int>(logitBiases);
+ }
+
+ if (options.AllowParallelToolCalls is bool allowParallelToolCalls)
+ {
+ (result.AdditionalProperties ??= [])[nameof(options.AllowParallelToolCalls)] = allowParallelToolCalls;
+ }
+
+ if (options.TopLogProbabilityCount is int topLogProbabilityCount)
+ {
+ (result.AdditionalProperties ??= [])[nameof(options.TopLogProbabilityCount)] = topLogProbabilityCount;
+ }
+
+ if (options.Metadata is IDictionary<string, string> { Count: > 0 } metadata)
+ {
+ (result.AdditionalProperties ??= [])[nameof(options.Metadata)] = new Dictionary<string, string>(metadata);
+ }
+
+ if (options.StoredOutputEnabled is bool storedOutputEnabled)
+ {
+ (result.AdditionalProperties ??= [])[nameof(options.StoredOutputEnabled)] = storedOutputEnabled;
+ }
+
+ if (options.Tools is { Count: > 0 } tools)
+ {
+ foreach (ChatTool tool in tools)
+ {
+ result.Tools ??= [];
+ result.Tools.Add(FromOpenAIChatTool(tool));
+ }
+
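+ // Inspect the JSON form of the ChatToolChoice to infer the corresponding ChatToolMode ("required", "auto", or a specific function).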
+ using var toolChoiceJson = JsonDocument.Parse(JsonModelHelpers.Serialize(options.ToolChoice).ToMemory());
+ JsonElement jsonElement = toolChoiceJson.RootElement;
+ switch (jsonElement.ValueKind)
+ {
+ case JsonValueKind.String:
+ result.ToolMode = jsonElement.GetString() switch
+ {
+ "required" => ChatToolMode.RequireAny,
+ _ => ChatToolMode.Auto,
+ };
+
+ break;
+ case JsonValueKind.Object:
+ if (jsonElement.TryGetProperty("function", out JsonElement functionElement))
+ {
+ result.ToolMode = ChatToolMode.RequireSpecific(functionElement.GetProperty("name").GetString()!);
+ }
+
+ break;
+ }
+ }
+ }
+
+ return result;
+ }
+
+ /// <summary>Converts an extensions options instance to an OpenAI options instance.</summary>
+ public static OpenAI.Chat.ChatCompletionOptions ToOpenAIOptions(ChatOptions? options)
+ {
+ ChatCompletionOptions result = new();
+
+ if (options is not null)
+ {
+ result.FrequencyPenalty = options.FrequencyPenalty;
+ result.MaxOutputTokenCount = options.MaxOutputTokens;
+ result.TopP = options.TopP;
+ result.PresencePenalty = options.PresencePenalty;
+ result.Temperature = options.Temperature;
+#pragma warning disable OPENAI001 // Type is for evaluation purposes only and is subject to change or removal in future updates.
+ result.Seed = options.Seed;
+#pragma warning restore OPENAI001
+
+ if (options.StopSequences is { Count: > 0 } stopSequences)
+ {
+ foreach (string stopSequence in stopSequences)
+ {
+ result.StopSequences.Add(stopSequence);
+ }
+ }
+
+ if (options.AdditionalProperties is { Count: > 0 } additionalProperties)
+ {
+ if (additionalProperties.TryGetValue(nameof(result.EndUserId), out string? endUserId))
+ {
+ result.EndUserId = endUserId;
+ }
+
+ if (additionalProperties.TryGetValue(nameof(result.IncludeLogProbabilities), out bool includeLogProbabilities))
+ {
+ result.IncludeLogProbabilities = includeLogProbabilities;
+ }
+
+ if (additionalProperties.TryGetValue(nameof(result.LogitBiases), out IDictionary<int, int>? logitBiases))
+ {
+ foreach (KeyValuePair<int, int> kvp in logitBiases!)
+ {
+ result.LogitBiases[kvp.Key] = kvp.Value;
+ }
+ }
+
+ if (additionalProperties.TryGetValue(nameof(result.AllowParallelToolCalls), out bool allowParallelToolCalls))
+ {
+ result.AllowParallelToolCalls = allowParallelToolCalls;
+ }
+
+ if (additionalProperties.TryGetValue(nameof(result.TopLogProbabilityCount), out int topLogProbabilityCountInt))
+ {
+ result.TopLogProbabilityCount = topLogProbabilityCountInt;
+ }
+
+ if (additionalProperties.TryGetValue(nameof(result.Metadata), out IDictionary<string, string>? metadata))
+ {
+ foreach (KeyValuePair<string, string> kvp in metadata)
+ {
+ result.Metadata[kvp.Key] = kvp.Value;
+ }
+ }
+
+ if (additionalProperties.TryGetValue(nameof(result.StoredOutputEnabled), out bool storeOutputEnabled))
+ {
+ result.StoredOutputEnabled = storeOutputEnabled;
+ }
+ }
+
+ if (options.Tools is { Count: > 0 } tools)
+ {
+ foreach (AITool tool in tools)
+ {
+ if (tool is AIFunction af)
+ {
+ result.Tools.Add(ToOpenAIChatTool(af));
+ }
+ }
+
+ switch (options.ToolMode)
+ {
+ case AutoChatToolMode:
+ result.ToolChoice = ChatToolChoice.CreateAutoChoice();
+ break;
+
+ case RequiredChatToolMode required:
+ result.ToolChoice = required.RequiredFunctionName is null ?
+ ChatToolChoice.CreateRequiredChoice() :
+ ChatToolChoice.CreateFunctionChoice(required.RequiredFunctionName);
+ break;
+ }
+ }
+
+ if (options.ResponseFormat is ChatResponseFormatText)
+ {
+ result.ResponseFormat = OpenAI.Chat.ChatResponseFormat.CreateTextFormat();
+ }
+ else if (options.ResponseFormat is ChatResponseFormatJson jsonFormat)
+ {
+ result.ResponseFormat = jsonFormat.Schema is { } jsonSchema ?
+ OpenAI.Chat.ChatResponseFormat.CreateJsonSchemaFormat(
+ jsonFormat.SchemaName ?? "json_schema",
+ BinaryData.FromBytes(
+ JsonSerializer.SerializeToUtf8Bytes(jsonSchema, OpenAIJsonContext.Default.JsonElement)),
+ jsonFormat.SchemaDescription) :
+ OpenAI.Chat.ChatResponseFormat.CreateJsonObjectFormat();
+ }
+ }
+
+ return result;
+ }
+
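+ /// <summary>Converts an OpenAI chat tool definition to an Extensions AI tool that carries only metadata.</summary>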
+ private static AITool FromOpenAIChatTool(ChatTool chatTool)
+ {
+ AdditionalPropertiesDictionary additionalProperties = new();
+ if (chatTool.FunctionSchemaIsStrict is bool strictValue)
+ {
+ additionalProperties["Strict"] = strictValue;
+ }
+
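+ // Round-trip the tool's parameter schema through OpenAIChatToolJson to recover its properties and required-parameter names.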
+ OpenAIChatToolJson openAiChatTool = JsonSerializer.Deserialize(chatTool.FunctionParameters.ToMemory().Span, OpenAIJsonContext.Default.OpenAIChatToolJson)!;
+ List<AIFunctionParameterMetadata> parameters = new(openAiChatTool.Properties.Count);
+ foreach (KeyValuePair<string, JsonElement> property in openAiChatTool.Properties)
+ {
+ parameters.Add(new(property.Key)
+ {
+ Schema = property.Value,
+ IsRequired = openAiChatTool.Required.Contains(property.Key),
+ });
+ }
+
+ AIFunctionMetadata metadata = new(chatTool.FunctionName)
+ {
+ Description = chatTool.FunctionDescription,
+ AdditionalProperties = additionalProperties,
+ Parameters = parameters,
+ ReturnParameter = new()
+ {
+ Description = "Return parameter",
+ Schema = _defaultParameterSchema,
+ }
+ };
+
+ return new MetadataOnlyAIFunction(metadata);
+ }
+
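+ // An AIFunction that surfaces only the metadata recovered from an OpenAI ChatTool; it isn't backed by a local implementation.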
+ private sealed class MetadataOnlyAIFunction(AIFunctionMetadata metadata) : AIFunction
+ {
+ public override AIFunctionMetadata Metadata => metadata;
+ protected override Task