Skip to content

Commit

Permalink
fix: display error message from chat conversation (#1341)
Browse files Browse the repository at this point in the history
* fix: handle error message from chat

Signed-off-by: lstocchi <[email protected]>

* fix: add test

Signed-off-by: lstocchi <[email protected]>

* fix: format

Signed-off-by: lstocchi <[email protected]>

* fix: rename message with error

Signed-off-by: lstocchi <[email protected]>

* fix: fix test

Signed-off-by: lstocchi <[email protected]>

---------

Signed-off-by: lstocchi <[email protected]>
  • Loading branch information
lstocchi authored Jul 10, 2024
1 parent b78b9e6 commit 873bb7a
Show file tree
Hide file tree
Showing 5 changed files with 144 additions and 24 deletions.
71 changes: 69 additions & 2 deletions packages/backend/src/managers/playgroundV2Manager.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ import { Messages } from '@shared/Messages';
import type { ModelInfo } from '@shared/src/models/IModelInfo';
import type { TaskRegistry } from '../registries/TaskRegistry';
import type { Task, TaskState } from '@shared/src/models/ITask';
import type { ChatMessage, ErrorMessage } from '@shared/src/models/IPlaygroundMessage';

vi.mock('openai', () => ({
default: vi.fn(),
Expand Down Expand Up @@ -185,7 +186,7 @@ test('valid submit should create IPlaygroundMessage and notify the webview', asy

// Wait for assistant message to be completed
await vi.waitFor(() => {
expect(manager.getConversations()[0].messages[1].content).toBeDefined();
expect((manager.getConversations()[0].messages[1] as ChatMessage).content).toBeDefined();
});

const conversations = manager.getConversations();
Expand Down Expand Up @@ -272,6 +273,72 @@ test('submit should send options', async () => {
});
});

// Verifies that a rejected completion request is surfaced to the user as an
// ErrorMessage appended to the conversation, and that the webview is notified.
test('error', async () => {
// One healthy inference server hosting the model the playground will use.
vi.mocked(inferenceManagerMock.getServers).mockReturnValue([
{
status: 'running',
health: {
Status: 'healthy',
},
models: [
{
id: 'dummyModelId',
file: {
file: 'dummyModelFile',
},
},
],
connection: {
port: 8888,
},
} as unknown as InferenceServer,
]);
// Make the OpenAI client reject with the "context too long" error string,
// which the manager is expected to augment with a hint for the user.
const createMock = vi.fn().mockRejectedValue('Please reduce the length of the messages or completion.');
vi.mocked(OpenAI).mockReturnValue({
chat: {
completions: {
create: createMock,
},
},
} as unknown as OpenAI);

const manager = new PlaygroundV2Manager(webviewMock, inferenceManagerMock, taskRegistryMock, telemetryMock);
await manager.createPlayground('playground 1', { id: 'dummyModelId' } as ModelInfo, 'tracking-1');

// Freeze time so message timestamps are deterministic.
const date = new Date(2000, 1, 1, 13);
vi.setSystemTime(date);

const playgrounds = manager.getConversations();
await manager.submit(playgrounds[0].id, 'dummyUserInput');

// Wait for error message
await vi.waitFor(() => {
expect((manager.getConversations()[0].messages[1] as ErrorMessage).error).toBeDefined();
});

const conversations = manager.getConversations();

// The conversation holds the user message plus the appended ErrorMessage.
expect(conversations.length).toBe(1);
expect(conversations[0].messages.length).toBe(2);
expect(conversations[0].messages[0]).toStrictEqual({
content: 'dummyUserInput',
id: expect.anything(),
options: undefined,
role: 'user',
timestamp: expect.any(Number),
});
// The raw error text is extended with the "start a new playground" note.
expect(conversations[0].messages[1]).toStrictEqual({
error: 'Please reduce the length of the messages or completion. Note: You should start a new playground.',
id: expect.anything(),
timestamp: expect.any(Number),
});

// The updated conversation list must be pushed to the frontend.
expect(webviewMock.postMessage).toHaveBeenLastCalledWith({
id: Messages.MSG_CONVERSATIONS_UPDATE,
body: conversations,
});
});

test('creating a new playground should send new playground to frontend', async () => {
vi.mocked(inferenceManagerMock.getServers).mockReturnValue([]);
const manager = new PlaygroundV2Manager(webviewMock, inferenceManagerMock, taskRegistryMock, telemetryMock);
Expand Down Expand Up @@ -570,7 +637,7 @@ describe('system prompt', () => {

// Wait for assistant message to be completed
await vi.waitFor(() => {
expect(manager.getConversations()[0].messages[1].content).toBeDefined();
expect((manager.getConversations()[0].messages[1] as ChatMessage).content).toBeDefined();
});

expect(() => {
Expand Down
43 changes: 34 additions & 9 deletions packages/backend/src/managers/playgroundV2Manager.ts
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,14 @@ import type { ChatCompletionChunk, ChatCompletionMessageParam } from 'openai/src
import type { ModelOptions } from '@shared/src/models/IModelOptions';
import type { Stream } from 'openai/streaming';
import { ConversationRegistry } from '../registries/ConversationRegistry';
import type { Conversation, PendingChat, SystemPrompt, UserChat } from '@shared/src/models/IPlaygroundMessage';
import { isSystemPrompt } from '@shared/src/models/IPlaygroundMessage';
import type {
Conversation,
ErrorMessage,
PendingChat,
SystemPrompt,
UserChat,
} from '@shared/src/models/IPlaygroundMessage';
import { isChatMessage, isSystemPrompt } from '@shared/src/models/IPlaygroundMessage';
import type { ModelInfo } from '@shared/src/models/IModelInfo';
import { withDefaultConfiguration } from '../utils/inferenceUtils';
import { getRandomString } from '../utils/randomUtils';
Expand Down Expand Up @@ -238,12 +244,29 @@ export class PlaygroundV2Manager implements Disposable {
.catch((err: unknown) => {
telemetry['errorMessage'] = `${String(err)}`;
console.error('Something went wrong while creating model response', err);
this.processError(conversation.id, err).catch((err: unknown) => {
console.error('Something went wrong while processing stream', err);
});
})
.finally(() => {
this.telemetry.logUsage('playground.submit', telemetry);
});
}

/**
 * Record a failed model response as an ErrorMessage appended to the conversation,
 * so the frontend can display it to the user.
 * @param conversationId id of the conversation the failed submit belongs to
 * @param error the rejection reason from the completion request; stringified for display
 */
private async processError(conversationId: string, error: unknown): Promise<void> {
  let errorMessage = String(error);
  // The inference server cannot recover once the context window is exceeded,
  // so advise the user to start a fresh playground.
  if (errorMessage.endsWith('Please reduce the length of the messages or completion.')) {
    errorMessage += ' Note: You should start a new playground.';
  }
  // Typed declaration instead of an `as ErrorMessage` assertion: the compiler
  // now verifies the object actually satisfies the ErrorMessage shape.
  const message: ErrorMessage = {
    id: this.#conversationRegistry.getUniqueId(),
    timestamp: Date.now(),
    error: errorMessage,
  };
  this.#conversationRegistry.submit(conversationId, message);
}

/**
* Given a Stream from the OpenAI library update and notify the publisher
* @param conversationId
Expand Down Expand Up @@ -283,13 +306,15 @@ export class PlaygroundV2Manager implements Disposable {
const conversation = this.#conversationRegistry.get(conversationId);
if (!conversation) throw new Error(`conversation with id ${conversationId} does not exist.`);

return conversation.messages.map(
message =>
({
name: undefined,
...message,
}) as ChatCompletionMessageParam,
);
return conversation.messages
.filter(m => isChatMessage(m))
.map(
message =>
({
name: undefined,
...message,
}) as ChatCompletionMessageParam,
);
}

getConversations(): Conversation[] {
Expand Down
3 changes: 2 additions & 1 deletion packages/backend/src/registries/ConversationRegistry.ts
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ import type {
ChatMessage,
Choice,
Conversation,
Message,
PendingChat,
} from '@shared/src/models/IPlaygroundMessage';
import type { Disposable, Webview } from '@podman-desktop/api';
Expand Down Expand Up @@ -141,7 +142,7 @@ export class ConversationRegistry extends Publisher<Conversation[]> implements D
* @param conversationId
* @param message
*/
submit(conversationId: string, message: ChatMessage): void {
submit(conversationId: string, message: Message): void {
const conversation = this.#conversations.get(conversationId);
if (conversation === undefined) throw new Error(`conversation with id ${conversationId} does not exist.`);

Expand Down
16 changes: 14 additions & 2 deletions packages/frontend/src/pages/Playground.svelte
Original file line number Diff line number Diff line change
@@ -1,7 +1,14 @@
<script lang="ts">
import { conversations } from '../stores/conversations';
import { studioClient } from '/@/utils/client';
import { isAssistantChat, isPendingChat, isUserChat, isSystemPrompt } from '@shared/src/models/IPlaygroundMessage';
import {
isAssistantChat,
isPendingChat,
isUserChat,
isSystemPrompt,
isChatMessage,
isErrorMessage,
} from '@shared/src/models/IPlaygroundMessage';
import { catalog } from '../stores/catalog';
import { afterUpdate } from 'svelte';
import ContentDetailsLayout from '../lib/ContentDetailsLayout.svelte';
Expand Down Expand Up @@ -30,14 +37,19 @@ let max_tokens = -1;
let top_p = 0.5;
$: conversation = $conversations.find(conversation => conversation.id === playgroundId);
$: messages = conversation?.messages.filter(message => !isSystemPrompt(message)) ?? [];
$: messages =
conversation?.messages.filter(message => isChatMessage(message)).filter(message => !isSystemPrompt(message)) ?? [];
$: model = $catalog.models.find(model => model.id === conversation?.modelId);
$: {
if (conversation?.messages.length) {
const latest = conversation.messages[conversation.messages.length - 1];
if (isSystemPrompt(latest) || (isAssistantChat(latest) && !isPendingChat(latest))) {
sendEnabled = true;
}
if (isErrorMessage(latest)) {
errorMsg = latest.error;
sendEnabled = true;
}
} else {
sendEnabled = true;
}
Expand Down
35 changes: 25 additions & 10 deletions packages/shared/src/models/IPlaygroundMessage.ts
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,18 @@

import type { ModelOptions } from './IModelOptions';

export interface ChatMessage {
/**
 * Base shape shared by every entry in a conversation: a unique id plus the
 * creation timestamp (epoch milliseconds).
 */
export interface Message {
id: string;
timestamp: number;
}

/**
 * A conversation entry representing a failed model response; `error` holds the
 * user-facing error text instead of chat content.
 */
export interface ErrorMessage extends Message {
error: string;
}

export interface ChatMessage extends Message {
role: 'system' | 'user' | 'assistant';
content?: string;
timestamp: number;
}

export interface AssistantChat extends ChatMessage {
Expand All @@ -47,7 +54,7 @@ export interface UserChat extends ChatMessage {

export interface Conversation {
id: string;
messages: ChatMessage[];
messages: Message[];
modelId: string;
name: string;
}
Expand All @@ -56,18 +63,26 @@ export interface Choice {
content: string;
}

export function isAssistantChat(msg: ChatMessage): msg is AssistantChat {
return msg.role === 'assistant';
/**
 * Type guard: true when the message carries an `error` field, i.e. it is an
 * ErrorMessage rather than a chat entry.
 */
export function isErrorMessage(msg: Message): msg is ErrorMessage {
  const carriesError = 'error' in msg;
  return carriesError;
}

/**
 * Type guard: true when the message has a `role` field, i.e. it is a chat
 * entry (system/user/assistant) rather than an ErrorMessage.
 */
export function isChatMessage(msg: Message): msg is ChatMessage {
  const hasRole = 'role' in msg;
  return hasRole;
}

/**
 * Type guard: true for chat messages authored by the assistant.
 */
export function isAssistantChat(msg: Message): msg is AssistantChat {
  if (!isChatMessage(msg)) {
    return false;
  }
  return msg.role === 'assistant';
}

export function isUserChat(msg: ChatMessage): msg is UserChat {
return msg.role === 'user';
/**
 * Type guard: true for chat messages authored by the user.
 */
export function isUserChat(msg: Message): msg is UserChat {
  if (!isChatMessage(msg)) {
    return false;
  }
  return msg.role === 'user';
}

export function isPendingChat(msg: ChatMessage): msg is PendingChat {
/**
 * Type guard: true for an assistant message whose response stream has not
 * completed yet.
 */
export function isPendingChat(msg: Message): msg is PendingChat {
  if (!isAssistantChat(msg)) {
    return false;
  }
  return !msg.completed;
}

export function isSystemPrompt(msg: ChatMessage): msg is SystemPrompt {
return msg.role === 'system';
/**
 * Type guard: true for the system-prompt chat message of a conversation.
 */
export function isSystemPrompt(msg: Message): msg is SystemPrompt {
  if (!isChatMessage(msg)) {
    return false;
  }
  return msg.role === 'system';
}

0 comments on commit 873bb7a

Please sign in to comment.