Skip to content

Commit

Permalink
chore(participant): Wire up telemetry for user prompts VSCODE-606 (#836)
Browse files Browse the repository at this point in the history
* Wire up telemetry for user prompts

* Remove .only

* Clean up rebase artifacts

* Remove .only

* Address CR feedback

* history_length -> history_size
  • Loading branch information
nirinchev authored Sep 27, 2024
1 parent 60021d6 commit 6d7150f
Show file tree
Hide file tree
Showing 9 changed files with 330 additions and 60 deletions.
49 changes: 26 additions & 23 deletions src/participant/participant.ts
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@ import {
} from '../telemetry/telemetryService';
import { DocsChatbotAIService } from './docsChatbotAIService';
import type TelemetryService from '../telemetry/telemetryService';
import type { ModelInput } from './prompts/promptBase';
import { processStreamWithIdentifiers } from './streamParsing';
import type { PromptIntent } from './prompts/intent';

Expand Down Expand Up @@ -164,10 +165,10 @@ export default class ParticipantController {
}

async _getChatResponse({
messages,
modelInput,
token,
}: {
messages: vscode.LanguageModelChatMessage[];
modelInput: ModelInput;
token: vscode.CancellationToken;
}): Promise<vscode.LanguageModelChatResponse> {
const model = await getCopilotModel();
Expand All @@ -176,20 +177,22 @@ export default class ParticipantController {
throw new Error('Copilot model not found');
}

return await model.sendRequest(messages, {}, token);
this._telemetryService.trackCopilotParticipantPrompt(modelInput.stats);

return await model.sendRequest(modelInput.messages, {}, token);
}

async streamChatResponse({
messages,
modelInput,
stream,
token,
}: {
messages: vscode.LanguageModelChatMessage[];
modelInput: ModelInput;
stream: vscode.ChatResponseStream;
token: vscode.CancellationToken;
}): Promise<void> {
const chatResponse = await this._getChatResponse({
messages,
modelInput,
token,
});
for await (const fragment of chatResponse.text) {
Expand Down Expand Up @@ -226,16 +229,16 @@ export default class ParticipantController {
}

async streamChatResponseContentWithCodeActions({
messages,
modelInput,
stream,
token,
}: {
messages: vscode.LanguageModelChatMessage[];
modelInput: ModelInput;
stream: vscode.ChatResponseStream;
token: vscode.CancellationToken;
}): Promise<void> {
const chatResponse = await this._getChatResponse({
messages,
modelInput,
token,
});

Expand All @@ -254,15 +257,15 @@ export default class ParticipantController {
// This will stream all of the response content and create a string from it.
// It should only be used when the entire response is needed at one time.
async getChatResponseContent({
messages,
modelInput,
token,
}: {
messages: vscode.LanguageModelChatMessage[];
modelInput: ModelInput;
token: vscode.CancellationToken;
}): Promise<string> {
let responseContent = '';
const chatResponse = await this._getChatResponse({
messages,
modelInput,
token,
});
for await (const fragment of chatResponse.text) {
Expand All @@ -278,14 +281,14 @@ export default class ParticipantController {
stream: vscode.ChatResponseStream,
token: vscode.CancellationToken
): Promise<ChatResult> {
const messages = await Prompts.generic.buildMessages({
const modelInput = await Prompts.generic.buildMessages({
request,
context,
connectionNames: this._getConnectionNames(),
});

await this.streamChatResponseContentWithCodeActions({
messages,
modelInput,
token,
stream,
});
Expand Down Expand Up @@ -334,14 +337,14 @@ export default class ParticipantController {
request: vscode.ChatRequest;
token: vscode.CancellationToken;
}): Promise<PromptIntent> {
const messages = await Prompts.intent.buildMessages({
const modelInput = await Prompts.intent.buildMessages({
connectionNames: this._getConnectionNames(),
request,
context,
});

const responseContent = await this.getChatResponseContent({
messages,
modelInput,
token,
});

Expand Down Expand Up @@ -708,7 +711,7 @@ export default class ParticipantController {
connectionNames: this._getConnectionNames(),
});
const responseContentWithNamespace = await this.getChatResponseContent({
messages: messagesWithNamespace,
modelInput: messagesWithNamespace,
token,
});
const { databaseName, collectionName } =
Expand Down Expand Up @@ -1043,7 +1046,7 @@ export default class ParticipantController {
return schemaRequestChatResult(context.history);
}

const messages = await Prompts.schema.buildMessages({
const modelInput = await Prompts.schema.buildMessages({
request,
context,
databaseName,
Expand All @@ -1054,7 +1057,7 @@ export default class ParticipantController {
...(sampleDocuments ? { sampleDocuments } : {}),
});
await this.streamChatResponse({
messages,
modelInput,
stream,
token,
});
Expand Down Expand Up @@ -1147,7 +1150,7 @@ export default class ParticipantController {
);
}

const messages = await Prompts.query.buildMessages({
const modelInput = await Prompts.query.buildMessages({
request,
context,
databaseName,
Expand All @@ -1158,7 +1161,7 @@ export default class ParticipantController {
});

await this.streamChatResponseContentWithCodeActions({
messages,
modelInput,
stream,
token,
});
Expand Down Expand Up @@ -1230,14 +1233,14 @@ export default class ParticipantController {
]
): Promise<void> {
const [request, context, stream, token] = args;
const messages = await Prompts.generic.buildMessages({
const modelInput = await Prompts.generic.buildMessages({
request,
context,
connectionNames: this._getConnectionNames(),
});

await this.streamChatResponseContentWithCodeActions({
messages,
modelInput,
stream,
token,
});
Expand Down
5 changes: 5 additions & 0 deletions src/participant/prompts/intent.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import type { InternalPromptPurpose } from '../../telemetry/telemetryService';
import type { PromptArgsBase } from './promptBase';
import { PromptBase } from './promptBase';

Expand Down Expand Up @@ -47,4 +48,8 @@ Docs`;
return 'Default';
}
}

// Labels prompts built by this class as 'intent' classification requests in
// participant prompt telemetry (see ParticipantPromptProperties.internal_purpose).
protected get internalPurposeForTelemetry(): InternalPromptPurpose {
return 'intent';
}
}
5 changes: 5 additions & 0 deletions src/participant/prompts/namespace.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import type { InternalPromptPurpose } from '../../telemetry/telemetryService';
import type { PromptArgsBase } from './promptBase';
import { PromptBase } from './promptBase';

Expand Down Expand Up @@ -50,4 +51,8 @@ No names found.
const collectionName = text.match(COL_NAME_REGEX)?.[1].trim();
return { databaseName, collectionName };
}

// Labels prompts built by this class as internal 'namespace'-resolution requests
// in participant prompt telemetry (see ParticipantPromptProperties.internal_purpose).
protected get internalPurposeForTelemetry(): InternalPromptPurpose {
return 'namespace';
}
}
55 changes: 50 additions & 5 deletions src/participant/prompts/promptBase.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,9 @@
import * as vscode from 'vscode';
import type { ChatResult, ParticipantResponseType } from '../constants';
import type {
InternalPromptPurpose,
ParticipantPromptProperties,
} from '../../telemetry/telemetryService';

export interface PromptArgsBase {
request: {
Expand All @@ -10,14 +14,31 @@ export interface PromptArgsBase {
connectionNames: string[];
}

/**
 * Result of building the user-facing portion of a prompt.
 * `prompt` is the final text sent as the user message; `hasSampleDocs`
 * records whether sample documents were appended, for telemetry.
 */
export interface UserPromptResponse {
prompt: string;
hasSampleDocs: boolean;
}

/**
 * Everything needed to issue a chat request to the language model:
 * the assembled messages plus the telemetry stats describing the prompt.
 */
export interface ModelInput {
messages: vscode.LanguageModelChatMessage[];
stats: ParticipantPromptProperties;
}

export abstract class PromptBase<TArgs extends PromptArgsBase> {
protected abstract getAssistantPrompt(args: TArgs): string;

protected getUserPrompt(args: TArgs): Promise<string> {
return Promise.resolve(args.request.prompt);
// Default: user-initiated prompts have no internal purpose; subclasses that
// build internal (non-user-visible) prompts override this to tag telemetry.
protected get internalPurposeForTelemetry(): InternalPromptPurpose {
return undefined;
}

async buildMessages(args: TArgs): Promise<vscode.LanguageModelChatMessage[]> {
// Default user prompt: pass the request text through unchanged. Subclasses
// (e.g. query/schema prompts) override to enrich the prompt with namespace,
// schema, or sample documents and to report hasSampleDocs accordingly.
protected getUserPrompt(args: TArgs): Promise<UserPromptResponse> {
return Promise.resolve({
prompt: args.request.prompt,
hasSampleDocs: false,
});
}

async buildMessages(args: TArgs): Promise<ModelInput> {
let historyMessages = this.getHistoryMessages(args);
// If the current user's prompt is a connection name, and the last
// message was to connect. We want to use the last
Expand Down Expand Up @@ -49,13 +70,37 @@ export abstract class PromptBase<TArgs extends PromptArgsBase> {
}
}

return [
const { prompt, hasSampleDocs } = await this.getUserPrompt(args);
const messages = [
// eslint-disable-next-line new-cap
vscode.LanguageModelChatMessage.Assistant(this.getAssistantPrompt(args)),
...historyMessages,
// eslint-disable-next-line new-cap
vscode.LanguageModelChatMessage.User(await this.getUserPrompt(args)),
vscode.LanguageModelChatMessage.User(prompt),
];

return {
messages,
stats: this.getStats(messages, args, hasSampleDocs),
};
}

/**
 * Builds the telemetry payload describing a prompt that is about to be sent
 * to the model.
 *
 * @param messages - The fully assembled chat messages (assistant + history + user).
 * @param args - The prompt args; only `request` and `context` are read here.
 * @param hasSampleDocs - Whether sample documents were embedded in the user prompt.
 * @returns The properties tracked for the participant-prompt telemetry event.
 */
protected getStats(
  messages: vscode.LanguageModelChatMessage[],
  { request, context }: TArgs,
  hasSampleDocs: boolean
): ParticipantPromptProperties {
  // Total character count across every message we are sending.
  let combinedLength = 0;
  for (const message of messages) {
    combinedLength += message.content.length;
  }

  return {
    total_message_length: combinedLength,
    user_input_length: request.prompt.length,
    has_sample_documents: hasSampleDocs,
    // Empty command means the plain "@mongodb" invocation — report 'generic'.
    command: request.command || 'generic',
    history_size: context.history.length,
    internal_purpose: this.internalPurposeForTelemetry,
  };
}

// When passing the history to the model we only want contextual messages
Expand Down
20 changes: 11 additions & 9 deletions src/participant/prompts/query.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,8 @@ import * as vscode from 'vscode';
import type { Document } from 'bson';

import { getStringifiedSampleDocuments } from '../sampleDocuments';
import type { PromptArgsBase, UserPromptResponse } from './promptBase';
import { codeBlockIdentifier } from '../constants';
import type { PromptArgsBase } from './promptBase';
import { PromptBase } from './promptBase';

interface QueryPromptArgs extends PromptArgsBase {
Expand Down Expand Up @@ -59,21 +59,23 @@ db.getCollection('');\n`;
request,
schema,
sampleDocuments,
}: QueryPromptArgs): Promise<string> {
}: QueryPromptArgs): Promise<UserPromptResponse> {
let prompt = request.prompt;
prompt += `\nDatabase name: ${databaseName}\n`;
prompt += `Collection name: ${collectionName}\n`;
if (schema) {
prompt += `Collection schema: ${schema}\n`;
}
if (sampleDocuments) {
prompt += await getStringifiedSampleDocuments({
sampleDocuments,
prompt,
});
}

return prompt;
const sampleDocumentsPrompt = await getStringifiedSampleDocuments({
sampleDocuments,
prompt,
});

return {
prompt: `${prompt}${sampleDocumentsPrompt}`,
hasSampleDocs: !!sampleDocumentsPrompt,
};
}

get emptyRequestResponse(): string {
Expand Down
15 changes: 9 additions & 6 deletions src/participant/prompts/schema.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import type { UserPromptResponse } from './promptBase';
import { PromptBase, type PromptArgsBase } from './promptBase';

export const DOCUMENTS_TO_SAMPLE_FOR_SCHEMA_PROMPT = 100;
Expand All @@ -11,7 +12,6 @@ export interface SchemaPromptArgs extends PromptArgsBase {
collectionName: string;
schema: string;
amountOfDocumentsSampled: number;
connectionNames: string[];
}

export class SchemaPrompt extends PromptBase<SchemaPromptArgs> {
Expand All @@ -30,13 +30,16 @@ Amount of documents sampled: ${amountOfDocumentsSampled}.`;
collectionName,
request,
schema,
}: SchemaPromptArgs): Promise<string> {
}: SchemaPromptArgs): Promise<UserPromptResponse> {
const prompt = request.prompt;
return Promise.resolve(`${
prompt ? `The user provided additional information: "${prompt}"\n` : ''
}Database name: ${databaseName}
return Promise.resolve({
prompt: `${
prompt ? `The user provided additional information: "${prompt}"\n` : ''
}Database name: ${databaseName}
Collection name: ${collectionName}
Schema:
${schema}`);
${schema}`,
hasSampleDocs: false,
});
}
}
Loading

0 comments on commit 6d7150f

Please sign in to comment.