Skip to content
This repository has been archived by the owner on Sep 15, 2024. It is now read-only.

Commit

Permalink
Fix Client LLM API
Browse files Browse the repository at this point in the history
[+] fix(openai.ts): update max_tokens value to Math.max(modelConfig.max_tokens, 1024)
  • Loading branch information
H0llyW00dzZ committed Nov 8, 2023
1 parent e35a9ae commit cc5f57b
Showing 1 changed file with 6 additions and 11 deletions.
17 changes: 6 additions & 11 deletions app/client/platforms/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -62,19 +62,17 @@ export class ChatGPTApi implements LLMApi {
* This method should be a member of the ChatGPTApi class, not nested inside another method
**/
private getNewStuff(
// Logic for determining `max_tokens` and `system_fingerprint` based on the model
// Logic for determining `system_fingerprint` based on the model
// note: the codebase still looks messy; might refactor this later for the list of model names.
model: string,
max_tokens: number | undefined,
system_fingerprint: string | undefined
): { max_tokens?: number; system_fingerprint?: string } {
): { system_fingerprint?: string } {
const modelConfig = {
...useAppConfig.getState().modelConfig,
...useChatStore.getState().currentSession().mask.modelConfig,
};
if (model.includes("gpt-4-1106-preview") || model.includes("gpt-4-vision-preview")) {
return {
max_tokens: max_tokens !== undefined ? max_tokens : modelConfig.max_tokens,
system_fingerprint: system_fingerprint !== undefined ? system_fingerprint : modelConfig.system_fingerprint,
};
}
Expand Down Expand Up @@ -174,10 +172,9 @@ export class ChatGPTApi implements LLMApi {
return modelMap[inputModel] || inputModel;
}
const actualModel = getModelForInstructVersion(modelConfig.model);
const { max_tokens, system_fingerprint } = this.getNewStuff(
const { system_fingerprint } = this.getNewStuff(
modelConfig.model,
modelConfig.max_tokens,
modelConfig.system_fingerprint
modelConfig.system_fingerprint,
);

const requestPayloads = {
Expand All @@ -189,10 +186,8 @@ export class ChatGPTApi implements LLMApi {
presence_penalty: modelConfig.presence_penalty,
frequency_penalty: modelConfig.frequency_penalty,
top_p: modelConfig.top_p,
// beta test for the new models, since they consume many tokens
// max is 4096
...{ max_tokens }, // Spread the max_tokens value
// not yet ready
max_tokens: Math.max(modelConfig.max_tokens, 1024),
// not yet
//...{ system_fingerprint }, // Spread the system_fingerprint value
},
image: {
Expand Down

0 comments on commit cc5f57b

Please sign in to comment.