
Commit

fix(chat): add message content to blank message request VSCODE-626 (#843)
Anemy authored Sep 30, 2024
1 parent beb8d72 commit 0a5a76f
Showing 3 changed files with 228 additions and 98 deletions.
85 changes: 76 additions & 9 deletions src/participant/participant.ts
@@ -2,6 +2,7 @@ import * as vscode from 'vscode';
import { getSimplifiedSchema, parseSchema } from 'mongodb-schema';
import type { Document } from 'bson';
import type { Reference } from 'mongodb-rag-core';
import util from 'util';

import { createLogger } from '../logging';
import type ConnectionController from '../connectionController';
@@ -138,6 +139,12 @@ export default class ParticipantController {
errorName = ParticipantErrorTypes.OTHER;
}

log.error('Participant encountered an error', {
command,
error_code: errorCode,
error_name: errorName,
});

this._telemetryService.track(
TelemetryEventTypes.PARTICIPANT_RESPONSE_FAILED,
{
@@ -177,9 +184,26 @@
throw new Error('Copilot model not found');
}

log.info('Sending request to model', {
messages: modelInput.messages.map(
(message: vscode.LanguageModelChatMessage) =>
util.inspect({
role: message.role,
contentLength: message.content.length,
})
),
});
this._telemetryService.trackCopilotParticipantPrompt(modelInput.stats);

return await model.sendRequest(modelInput.messages, {}, token);
const modelResponse = await model.sendRequest(
modelInput.messages,
{},
token
);

log.info('Model response received');

return modelResponse;
}

async streamChatResponse({
@@ -267,6 +291,11 @@
identifier: codeBlockIdentifier,
});

log.info('Streamed response to chat', {
outputLength,
hasCodeBlock,
});

return {
outputLength,
hasCodeBlock,
@@ -376,6 +405,10 @@
token,
});

log.info('Received intent response from model', {
responseContentLength: responseContent.length,
});

return Prompts.intent.getIntentFromModelResponse(responseContent);
}

@@ -738,14 +771,41 @@
request,
connectionNames: this._getConnectionNames(),
});
const responseContentWithNamespace = await this.getChatResponseContent({
modelInput: messagesWithNamespace,
token,
});
const { databaseName, collectionName } =
Prompts.namespace.extractDatabaseAndCollectionNameFromResponse(
responseContentWithNamespace
);

let {
databaseName,
collectionName,
}: {
databaseName: string | undefined;
collectionName: string | undefined;
} = {
databaseName: undefined,
collectionName: undefined,
};

// When there's no user message content we can
// skip the request to the model. This would happen with /schema.
if (Prompts.doMessagesContainUserInput(messagesWithNamespace.messages)) {
// VSCODE-626: When there's an empty message sent to the ai model,
// it currently errors (not on insiders, only main VSCode).
// Here we're defaulting to have some content as a workaround.
// TODO: Remove this when the issue is fixed.
messagesWithNamespace.messages[
messagesWithNamespace.messages.length - 1
].content =
messagesWithNamespace.messages[
messagesWithNamespace.messages.length - 1
].content.trim() || 'see previous messages';

const responseContentWithNamespace = await this.getChatResponseContent({
modelInput: messagesWithNamespace,
token,
});
({ databaseName, collectionName } =
Prompts.namespace.extractDatabaseAndCollectionNameFromResponse(
responseContentWithNamespace
));
}

// See if there's a namespace set in the
// chat metadata we can fallback to if the model didn't find it.
@@ -757,6 +817,11 @@
collectionName: collectionNameFromMetadata,
} = this._chatMetadataStore.getChatMetadata(chatId) ?? {};

log.info('Namespaces found in chat', {
databaseName: databaseName || databaseNameFromMetadata,
collectionName: collectionName || collectionNameFromMetadata,
});

return {
databaseName: databaseName || databaseNameFromMetadata,
collectionName: collectionName || collectionNameFromMetadata,
@@ -831,6 +896,8 @@
context: vscode.ChatContext;
stream: vscode.ChatResponseStream;
}): ChatResult {
log.info('Participant asked user to connect');

stream.markdown(
"Looks like you aren't currently connected, first let's get you connected to the cluster we'd like to create this query to run against.\n\n"
);
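For context on the VSCODE-626 hunk above: the workaround only rewrites the content of the final message before the namespace request is sent, substituting placeholder text when that message is blank. Below is a minimal standalone sketch of that fallback logic. The ChatMessageLike shape and the ensureLastMessageHasContent helper are hypothetical illustrations, not code from this repository, and they stand in for the real vscode.LanguageModelChatMessage type.

// Simplified stand-in for vscode.LanguageModelChatMessage; the real type
// comes from the VS Code Language Model API and is not importable here.
interface ChatMessageLike {
  role: 'user' | 'assistant';
  content: string;
}

// Mirrors the idea of the VSCODE-626 workaround: if the last message is
// blank or whitespace-only, substitute placeholder content so the request
// to the model is not sent with an empty message.
function ensureLastMessageHasContent(messages: ChatMessageLike[]): void {
  if (messages.length === 0) {
    return;
  }
  const lastMessage = messages[messages.length - 1];
  lastMessage.content = lastMessage.content.trim() || 'see previous messages';
}

// Example: a trailing whitespace-only user message gets the placeholder.
const sketchMessages: ChatMessageLike[] = [
  { role: 'user', content: 'Which collection holds the orders?' },
  { role: 'user', content: '   ' },
];
ensureLastMessageHasContent(sketchMessages);
console.log(sketchMessages[1].content); // "see previous messages"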
20 changes: 19 additions & 1 deletion src/participant/prompts/index.ts
@@ -1,4 +1,4 @@
import type * as vscode from 'vscode';
import * as vscode from 'vscode';

import { GenericPrompt } from './generic';
import { IntentPrompt } from './intent';
Expand All @@ -16,4 +16,22 @@ export class Prompts {
public static isPromptEmpty(request: vscode.ChatRequest): boolean {
return !request.prompt || request.prompt.trim().length === 0;
}

// Check if any of the messages contain user input.
// This is useful since when there's no user input in any
// messages, we can skip some additional processing.
public static doMessagesContainUserInput(
messages: vscode.LanguageModelChatMessage[]
): boolean {
for (const message of messages) {
if (
message.role === vscode.LanguageModelChatMessageRole.User &&
message.content.trim().length > 0
) {
return true;
}
}

return false;
}
}
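The new helper above returns true only when at least one user-role message carries non-whitespace content, which lets the participant skip the namespace request entirely for commands like /schema. A small illustrative sketch of the same check outside the extension host follows; the MessageLike shape and the standalone function are assumptions made for the example, whereas the real code operates on vscode.LanguageModelChatMessage and LanguageModelChatMessageRole.

// Simplified message shape used only for this sketch.
interface MessageLike {
  role: 'user' | 'assistant';
  content: string;
}

// Same idea as Prompts.doMessagesContainUserInput: true only if at least
// one user message has non-whitespace content.
function doMessagesContainUserInput(messages: MessageLike[]): boolean {
  return messages.some(
    (message) => message.role === 'user' && message.content.trim().length > 0
  );
}

console.log(
  doMessagesContainUserInput([
    { role: 'assistant', content: 'You are a MongoDB assistant.' },
    { role: 'user', content: '   ' },
  ])
); // false: the only user message is whitespace, so the model call can be skipped
console.log(
  doMessagesContainUserInput([{ role: 'user', content: 'find all users' }])
); // true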
