Add Azure OpenAI structured outputs support #98

Merged
@@ -4,6 +4,7 @@
import com.azure.core.util.Configuration;
import dev.langchain4j.model.Tokenizer;
import dev.langchain4j.model.azure.*;
import dev.langchain4j.model.chat.request.ResponseFormat;
import org.springframework.boot.autoconfigure.AutoConfiguration;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
@@ -46,6 +47,8 @@ AzureOpenAiChatModel openAiChatModel(Properties properties) {
.presencePenalty(chatModelProperties.presencePenalty())
.frequencyPenalty(chatModelProperties.frequencyPenalty())
.seed(chatModelProperties.seed())
.responseFormat(chatModelProperties.responseFormat() != null && chatModelProperties.responseFormat().equalsIgnoreCase("json") ? ResponseFormat.JSON : ResponseFormat.TEXT)
.strictJsonSchema(chatModelProperties.strictJsonSchema())
.timeout(Duration.ofSeconds(chatModelProperties.timeout() == null ? 0 : chatModelProperties.timeout()))
.maxRetries(chatModelProperties.maxRetries())
.proxyOptions(ProxyOptions.fromConfiguration(Configuration.getGlobalConfiguration()))
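
For reference, a minimal sketch (not part of this diff) of the model that the builder call above produces once the new options are enabled. The endpoint, key and deployment name are placeholders, and the response-format property key is inferred from the responseFormat() accessor via relaxed binding rather than shown verbatim in this change:

// Sketch only: mirrors what the auto-configuration wires up when
// langchain4j.azure-open-ai.chat-model.response-format=json (inferred key name) and
// langchain4j.azure-open-ai.chat-model.strict-json-schema=true (key used in the test below) are set.
AzureOpenAiChatModel model = AzureOpenAiChatModel.builder()
        .endpoint(System.getenv("AZURE_OPENAI_ENDPOINT"))
        .apiKey(System.getenv("AZURE_OPENAI_KEY"))
        .deploymentName("gpt-4o-mini")
        .responseFormat(ResponseFormat.JSON)
        .strictJsonSchema(true)
        .build();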
@@ -19,6 +19,7 @@ record ChatModelProperties(
Double frequencyPenalty,
Long seed,
String responseFormat,
Boolean strictJsonSchema,
Integer timeout, // TODO use Duration instead
Integer maxRetries,
Boolean logRequestsAndResponses,
@@ -8,6 +8,12 @@
import dev.langchain4j.model.azure.AzureOpenAiStreamingChatModel;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.chat.request.ChatRequest;
import dev.langchain4j.model.chat.request.ResponseFormat;
import dev.langchain4j.model.chat.request.json.JsonArraySchema;
import dev.langchain4j.model.chat.request.json.JsonObjectSchema;
import dev.langchain4j.model.chat.request.json.JsonSchema;
import dev.langchain4j.model.chat.request.json.JsonStringSchema;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.image.ImageModel;
import dev.langchain4j.model.output.Response;
@@ -17,8 +23,12 @@
import org.springframework.boot.autoconfigure.AutoConfigurations;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;

import java.util.List;
import java.util.concurrent.CompletableFuture;

import static dev.langchain4j.data.message.UserMessage.userMessage;
import static dev.langchain4j.model.chat.request.ResponseFormatType.JSON;
import static java.util.Collections.singletonList;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.assertj.core.api.Assertions.assertThat;

@@ -53,6 +63,52 @@ void should_provide_chat_model(String deploymentName) {
});
}

class Person {

String name;
List<String> favouriteColors;
}

@ParameterizedTest(name = "Deployment name: {0}")
@CsvSource({
"gpt-4o-mini"
})
void should_provide_chat_model_with_json_schema(String deploymentName) {
contextRunner
.withPropertyValues(
"langchain4j.azure-open-ai.chat-model.api-key=" + AZURE_OPENAI_KEY,
"langchain4j.azure-open-ai.chat-model.endpoint=" + AZURE_OPENAI_ENDPOINT,
"langchain4j.azure-open-ai.chat-model.deployment-name=" + deploymentName,
"langchain4j.azure-open-ai.chat-model.strict-json-schema=true"
)
.run(context -> {

ChatLanguageModel chatLanguageModel = context.getBean(ChatLanguageModel.class);

ChatRequest chatRequest = ChatRequest.builder()
.messages(singletonList(userMessage("Julien likes blue, white and red")))
.responseFormat(ResponseFormat.builder()
.type(JSON)
.jsonSchema(JsonSchema.builder()
.name("Person")
.rootElement(JsonObjectSchema.builder()
.addStringProperty("name")
.addProperty("favouriteColors", JsonArraySchema.builder()
.items(new JsonStringSchema())
.build())
.required("name", "favouriteColors")
.build())
.build())
.build())
.build();

assertThat(chatLanguageModel).isInstanceOf(AzureOpenAiChatModel.class);
AiMessage aiMessage = chatLanguageModel.chat(chatRequest).aiMessage();
assertThat(aiMessage.text()).contains("{\"name\":\"Julien\",\"favouriteColors\":[\"blue\",\"white\",\"red\"]}");
assertThat(context.getBean(AzureOpenAiChatModel.class)).isSameAs(chatLanguageModel);
});
}

@ParameterizedTest(name = "Deployment name: {0}")
@CsvSource({
"gpt-3.5-turbo"
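
As a hedged follow-up to the JSON-schema test above (not part of this diff): a reply that conforms to the schema can be bound directly to a Java type. The Person record below is a stand-in for the test's Person class, and jackson-databind (2.12+ for record support) is an assumed dependency, not something this PR adds:

// Sketch only, under the assumptions stated above.
record Person(String name, java.util.List<String> favouriteColors) {}

static Person parse(String json) throws com.fasterxml.jackson.core.JsonProcessingException {
    // json is the schema-constrained reply, e.g. {"name":"Julien","favouriteColors":["blue","white","red"]}
    return new com.fasterxml.jackson.databind.ObjectMapper().readValue(json, Person.class);
}

// Usage: Person p = parse(aiMessage.text()); p.name() returns "Julien".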