[azopenai] Adding in streaming for ChatCompletions, like we have for Completions. #21072

Merged 2 commits on Jun 28, 2023
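For reference, a minimal sketch of how a caller might consume the new streaming API, based on the test code in this PR; the endpoint, API key, and deployment name below are placeholders, and error handling is abbreviated:

package main

import (
	"context"
	"errors"
	"fmt"
	"io"

	"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
	"github.com/Azure/azure-sdk-for-go/sdk/cognitiveservices/azopenai"
)

func main() {
	// placeholder credentials and deployment name
	cred, err := azopenai.NewKeyCredential("<api-key>")
	if err != nil {
		panic(err)
	}

	client, err := azopenai.NewClientWithKeyCredential("https://<resource>.openai.azure.com", cred, "<chat-deployment>", nil)
	if err != nil {
		panic(err)
	}

	resp, err := client.GetChatCompletionsStream(context.Background(), azopenai.ChatCompletionsOptions{
		Messages: []*azopenai.ChatMessage{
			{Role: to.Ptr(azopenai.ChatRole("user")), Content: to.Ptr("Count to 10.")},
		},
		MaxTokens: to.Ptr(int32(1024)),
	}, nil)
	if err != nil {
		panic(err)
	}

	// each streamed ChatCompletion carries a partial message in Choices[i].Delta
	for {
		completion, err := resp.ChatCompletionsStream.Read()
		if errors.Is(err, io.EOF) {
			break
		}
		if err != nil {
			panic(err)
		}

		for _, choice := range completion.Choices {
			if choice.Delta.Content != nil {
				fmt.Print(*choice.Delta.Content)
			}
		}
	}
}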
2 changes: 1 addition & 1 deletion sdk/cognitiveservices/azopenai/assets.json
@@ -2,5 +2,5 @@
"AssetsRepo": "Azure/azure-sdk-assets",
"AssetsRepoPrefixPath": "go",
"TagPrefix": "go/cognitiveservices/azopenai",
"Tag": "go/cognitiveservices/azopenai_49bcacb061"
"Tag": "go/cognitiveservices/azopenai_bf5b07347b"
}
187 changes: 187 additions & 0 deletions sdk/cognitiveservices/azopenai/client_chat_completions_test.go
@@ -0,0 +1,187 @@
//go:build go1.18
// +build go1.18

// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.

package azopenai_test

import (
	"context"
	"errors"
	"io"
	"os"
	"testing"

	"github.com/Azure/azure-sdk-for-go/sdk/azcore"
	"github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
	"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
	"github.com/Azure/azure-sdk-for-go/sdk/azidentity"
	"github.com/Azure/azure-sdk-for-go/sdk/cognitiveservices/azopenai"
	"github.com/Azure/azure-sdk-for-go/sdk/internal/recording"
	"github.com/stretchr/testify/require"
)

var chatCompletionsRequest = azopenai.ChatCompletionsOptions{
	Messages: []*azopenai.ChatMessage{
		{
			Role:    to.Ptr(azopenai.ChatRole("user")),
			Content: to.Ptr("Count to 10, with a comma between each number, no newlines and a period at the end. E.g., 1, 2, 3, ..."),
		},
	},
	MaxTokens:   to.Ptr(int32(1024)),
	Temperature: to.Ptr(float32(0.0)),
	Model:       &openAIChatCompletionsModelDeployment,
}

var expectedContent = "1, 2, 3, 4, 5, 6, 7, 8, 9, 10."
var expectedRole = azopenai.ChatRoleAssistant

func TestClient_GetChatCompletions(t *testing.T) {
	cred, err := azopenai.NewKeyCredential(apiKey)
	require.NoError(t, err)

	chatClient, err := azopenai.NewClientWithKeyCredential(endpoint, cred, chatCompletionsModelDeployment, newClientOptionsForTest(t))
	require.NoError(t, err)

	testGetChatCompletions(t, chatClient)
}

func TestClient_GetChatCompletionsStream(t *testing.T) {
	cred, err := azopenai.NewKeyCredential(apiKey)
	require.NoError(t, err)

	chatClient, err := azopenai.NewClientWithKeyCredential(endpoint, cred, chatCompletionsModelDeployment, newClientOptionsForTest(t))
	require.NoError(t, err)

	testGetChatCompletionsStream(t, chatClient)
}

func TestClient_OpenAI_GetChatCompletions(t *testing.T) {
	chatClient := newOpenAIClientForTest(t)
	testGetChatCompletions(t, chatClient)
}

func TestClient_OpenAI_GetChatCompletionsStream(t *testing.T) {
	chatClient := newOpenAIClientForTest(t)
	testGetChatCompletionsStream(t, chatClient)
}

func testGetChatCompletions(t *testing.T, client *azopenai.Client) {
	expected := azopenai.ChatCompletions{
		Choices: []*azopenai.ChatChoice{
			{
				Message: &azopenai.ChatChoiceMessage{
					Role:    &expectedRole,
					Content: &expectedContent,
				},
				Index:        to.Ptr(int32(0)),
				FinishReason: to.Ptr(azopenai.CompletionsFinishReason("stop")),
			},
		},
		Usage: &azopenai.CompletionsUsage{
			// these change depending on which model you use. These #'s work for gpt-4, which is
			// what I'm using for these tests.
			CompletionTokens: to.Ptr(int32(29)),
			PromptTokens:     to.Ptr(int32(42)),
			TotalTokens:      to.Ptr(int32(71)),
		},
	}

	resp, err := client.GetChatCompletions(context.Background(), chatCompletionsRequest, nil)
	require.NoError(t, err)

	require.NotEmpty(t, resp.ID)
	require.NotEmpty(t, resp.Created)

	expected.ID = resp.ID
	expected.Created = resp.Created

	require.Equal(t, expected, resp.ChatCompletions)
}

func testGetChatCompletionsStream(t *testing.T, client *azopenai.Client) {
	streamResp, err := client.GetChatCompletionsStream(context.Background(), chatCompletionsRequest, nil)
	require.NoError(t, err)

	// the data comes back differently for streaming:
	// 1. the text comes back in the ChatCompletion.Delta field
	// 2. the role is only sent on the first streamed ChatCompletion
	// check that the role came back as well.
	var choices []*azopenai.ChatChoice

	for {
		completion, err := streamResp.ChatCompletionsStream.Read()

		if errors.Is(err, io.EOF) {
			break
		}

		require.NoError(t, err)
		require.Equal(t, 1, len(completion.Choices))
		choices = append(choices, completion.Choices[0])
	}

	var message string

	for _, choice := range choices {
		if choice.Delta.Content == nil {
			continue
		}

		message += *choice.Delta.Content
	}

	require.Equal(t, expectedContent, message, "Ultimately, the same result as GetChatCompletions(), just sent across the .Delta field instead")

	require.Equal(t, azopenai.ChatRoleAssistant, expectedRole)
}

func TestClient_GetChatCompletions_DefaultAzureCredential(t *testing.T) {
	if recording.GetRecordMode() == recording.PlaybackMode {
		t.Skipf("Not running this test in playback (for now)")
	}

	if os.Getenv("USE_TOKEN_CREDS") != "true" {
		t.Skipf("USE_TOKEN_CREDS is not true, disabling token credential tests")
	}

	recordingTransporter := newRecordingTransporter(t)

	dac, err := azidentity.NewDefaultAzureCredential(&azidentity.DefaultAzureCredentialOptions{
		ClientOptions: policy.ClientOptions{
			Transport: recordingTransporter,
		},
	})
	require.NoError(t, err)

	chatClient, err := azopenai.NewClient(endpoint, dac, chatCompletionsModelDeployment, &azopenai.ClientOptions{
		ClientOptions: policy.ClientOptions{Transport: recordingTransporter},
	})
	require.NoError(t, err)

	testGetChatCompletions(t, chatClient)
}

func TestClient_GetChatCompletions_InvalidModel(t *testing.T) {
	cred, err := azopenai.NewKeyCredential(apiKey)
	require.NoError(t, err)

	chatClient, err := azopenai.NewClientWithKeyCredential(endpoint, cred, "thisdoesntexist", newClientOptionsForTest(t))
	require.NoError(t, err)

	_, err = chatClient.GetChatCompletions(context.Background(), azopenai.ChatCompletionsOptions{
		Messages: []*azopenai.ChatMessage{
			{
				Role:    to.Ptr(azopenai.ChatRole("user")),
				Content: to.Ptr("Count to 100, with a comma between each number and no newlines. E.g., 1, 2, 3, ..."),
			},
		},
		MaxTokens:   to.Ptr(int32(1024)),
		Temperature: to.Ptr(float32(0.0)),
	}, nil)

	var respErr *azcore.ResponseError
	require.ErrorAs(t, err, &respErr)
	require.Equal(t, "DeploymentNotFound", respErr.ErrorCode)
}
88 changes: 88 additions & 0 deletions sdk/cognitiveservices/azopenai/client_completions_test.go
@@ -0,0 +1,88 @@
//go:build go1.18
// +build go1.18

// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.

package azopenai_test

import (
	"context"
	"log"
	"testing"

	"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
	"github.com/Azure/azure-sdk-for-go/sdk/cognitiveservices/azopenai"
	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
	"github.com/stretchr/testify/require"
)

func TestClient_GetCompletions(t *testing.T) {
	type args struct {
		ctx          context.Context
		deploymentID string
		body         azopenai.CompletionsOptions
		options      *azopenai.GetCompletionsOptions
	}
	cred, err := azopenai.NewKeyCredential(apiKey)
	require.NoError(t, err)

	client, err := azopenai.NewClientWithKeyCredential(endpoint, cred, completionsModelDeployment, newClientOptionsForTest(t))
	if err != nil {
		log.Fatalf("%v", err)
	}
	tests := []struct {
		name    string
		client  *azopenai.Client
		args    args
		want    azopenai.GetCompletionsResponse
		wantErr bool
	}{
		{
			name:   "chatbot",
			client: client,
			args: args{
				ctx:          context.TODO(),
				deploymentID: completionsModelDeployment,
				body: azopenai.CompletionsOptions{
					Prompt:      []*string{to.Ptr("What is Azure OpenAI?")},
					MaxTokens:   to.Ptr(int32(2048 - 127)),
					Temperature: to.Ptr(float32(0.0)),
				},
				options: nil,
			},
			want: azopenai.GetCompletionsResponse{
				Completions: azopenai.Completions{
					Choices: []*azopenai.Choice{
						{
							Text:         to.Ptr("\n\nAzure OpenAI is a platform from Microsoft that provides access to OpenAI's artificial intelligence (AI) technologies. It enables developers to build, train, and deploy AI models in the cloud. Azure OpenAI provides access to OpenAI's powerful AI technologies, such as GPT-3, which can be used to create natural language processing (NLP) applications, computer vision models, and reinforcement learning models."),
							Index:        to.Ptr(int32(0)),
							FinishReason: to.Ptr(azopenai.CompletionsFinishReason("stop")),
							Logprobs:     nil,
						},
					},
					Usage: &azopenai.CompletionsUsage{
						CompletionTokens: to.Ptr(int32(85)),
						PromptTokens:     to.Ptr(int32(6)),
						TotalTokens:      to.Ptr(int32(91)),
					},
				},
			},
			wantErr: false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := tt.client.GetCompletions(tt.args.ctx, tt.args.body, tt.args.options)
			if (err != nil) != tt.wantErr {
				t.Errorf("Client.GetCompletions() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			opts := cmpopts.IgnoreFields(azopenai.Completions{}, "Created", "ID")
			if diff := cmp.Diff(tt.want.Completions, got.Completions, opts); diff != "" {
				t.Errorf("Client.GetCompletions(): -want, +got:\n%s", diff)
			}
		})
	}
}
97 changes: 97 additions & 0 deletions sdk/cognitiveservices/azopenai/client_embeddings_test.go
@@ -0,0 +1,97 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.

package azopenai_test

import (
	"context"
	"testing"

	"github.com/Azure/azure-sdk-for-go/sdk/azcore"
	"github.com/Azure/azure-sdk-for-go/sdk/cognitiveservices/azopenai"
	"github.com/stretchr/testify/require"
)

func TestClient_GetEmbeddings_InvalidModel(t *testing.T) {
	cred, err := azopenai.NewKeyCredential(apiKey)
	require.NoError(t, err)

	chatClient, err := azopenai.NewClientWithKeyCredential(endpoint, cred, "thisdoesntexist", newClientOptionsForTest(t))
	require.NoError(t, err)

	_, err = chatClient.GetEmbeddings(context.Background(), azopenai.EmbeddingsOptions{}, nil)

	var respErr *azcore.ResponseError
	require.ErrorAs(t, err, &respErr)
	require.Equal(t, "DeploymentNotFound", respErr.ErrorCode)
}

func TestClient_OpenAI_GetEmbeddings(t *testing.T) {
	client := newOpenAIClientForTest(t)
	modelID := "text-similarity-curie-001"
	testGetEmbeddings(t, client, modelID)
}

func TestClient_GetEmbeddings(t *testing.T) {
	// model deployment points to `text-similarity-curie-001`
	deploymentID := "embedding"

	cred, err := azopenai.NewKeyCredential(apiKey)
	require.NoError(t, err)

	client, err := azopenai.NewClientWithKeyCredential(endpoint, cred, deploymentID, newClientOptionsForTest(t))
	require.NoError(t, err)

	testGetEmbeddings(t, client, deploymentID)
}

func testGetEmbeddings(t *testing.T, client *azopenai.Client, modelOrDeploymentID string) {
	type args struct {
		ctx          context.Context
		deploymentID string
		body         azopenai.EmbeddingsOptions
		options      *azopenai.GetEmbeddingsOptions
	}

	tests := []struct {
		name    string
		client  *azopenai.Client
		args    args
		want    azopenai.GetEmbeddingsResponse
		wantErr bool
	}{
		{
			name:   "Embeddings",
			client: client,
			args: args{
				ctx:          context.TODO(),
				deploymentID: modelOrDeploymentID,
				body: azopenai.EmbeddingsOptions{
					Input: []byte("\"Your text string goes here\""),
					Model: &modelOrDeploymentID,
				},
				options: nil,
			},
			want: azopenai.GetEmbeddingsResponse{
				azopenai.Embeddings{
					Data:  []*azopenai.EmbeddingItem{},
					Usage: &azopenai.EmbeddingsUsage{},
				},
			},
			wantErr: false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := tt.client.GetEmbeddings(tt.args.ctx, tt.args.body, tt.args.options)
			if (err != nil) != tt.wantErr {
				t.Errorf("Client.GetEmbeddings() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			// text-similarity-curie-001 returns 4096-dimensional embeddings
			if len(got.Embeddings.Data[0].Embedding) != 4096 {
				t.Errorf("Client.GetEmbeddings() len(Embedding) want 4096, got %d", len(got.Embeddings.Data[0].Embedding))
				return
			}
		})
	}
}