From ef4bcbef5b1e065a9e56ea295edf92b07c7be4de Mon Sep 17 00:00:00 2001 From: fengsh Date: Fri, 13 Oct 2023 16:03:20 -0400 Subject: [PATCH 1/3] introduce biomedical mask --- app/masks/en.ts | 39 +++++++++++++++++++++++++++++++++++++++ app/masks/index.ts | 2 +- 2 files changed, 40 insertions(+), 1 deletion(-) diff --git a/app/masks/en.ts b/app/masks/en.ts index 1ab40d59b03..7ebccfe24e6 100644 --- a/app/masks/en.ts +++ b/app/masks/en.ts @@ -131,4 +131,43 @@ export const EN_MASKS: BuiltinMask[] = [ builtin: true, createdAt: 1688899480413, }, + { + avatar: "1f977", + name: "Biomedical researcher assistant", + context: [ + { + id: "biomedical-researcher-assistant-1", + role: "user", + content: "You are an assistant to a biomedical researcher.", + date: "", + }, + { + id: "biomedical-researcher-assistant-2", + role: "user", + content: + "Your role is to contextualise the user's findings with biomedical background knowledge. If provided with a list, please give granular feedback about the individual entities, your knowledge about them, and what they may mean in the context of the research.", + date: "", + }, + { + id: "biomedical-researcher-assistant-3", + role: "user", + content: + "You can ask the user to provide explanations and more background at any time, for instance on the treatment a patient has received, or the experimental background.
But for now, wait for the user to ask a question.", + date: "", + }, + ], + modelConfig: { + model: "gpt-3.5-turbo", + temperature: 0.7, + max_tokens: 2000, + presence_penalty: 0, + frequency_penalty: 0, + sendMemory: true, + historyMessageCount: 4, + compressMessageLengthThreshold: 2000, + }, + lang: "en", + builtin: true, + createdAt: 1697222692762, + }, ]; diff --git a/app/masks/index.ts b/app/masks/index.ts index 4db4ac88d80..a32169bee22 100644 --- a/app/masks/index.ts +++ b/app/masks/index.ts @@ -21,6 +21,6 @@ export const BUILTIN_MASK_STORE = { }, }; -export const BUILTIN_MASKS: BuiltinMask[] = [...CN_MASKS, ...EN_MASKS].map( +export const BUILTIN_MASKS: BuiltinMask[] = [/*...CN_MASKS,*/ ...EN_MASKS].map( (m) => BUILTIN_MASK_STORE.add(m), ); From 78a6d52a37d65f673f54ffa5bc66fc35d8e688e7 Mon Sep 17 00:00:00 2001 From: fengsh Date: Fri, 13 Oct 2023 16:16:39 -0400 Subject: [PATCH 2/3] generate globally unique id as session id --- app/store/chat.ts | 8 +++++--- package.json | 2 ++ yarn.lock | 10 ++++++++++ 3 files changed, 17 insertions(+), 3 deletions(-) diff --git a/app/store/chat.ts b/app/store/chat.ts index 269cc4a33c9..d45391b5e72 100644 --- a/app/store/chat.ts +++ b/app/store/chat.ts @@ -1,6 +1,6 @@ import { create } from "zustand"; import { persist } from "zustand/middleware"; - +import { v4 as uuidv4 } from "uuid"; import { trimTopic } from "../utils"; import Locale, { getLang } from "../locales"; @@ -20,6 +20,8 @@ import { estimateTokenLength } from "../utils/token"; import { nanoid } from "nanoid"; import { createPersistStore } from "../utils/store"; +const generateUniqId = () => uuidv4(); + export type ChatMessage = RequestMessage & { date: string; streaming?: boolean; @@ -66,7 +68,7 @@ export const BOT_HELLO: ChatMessage = createMessage({ function createEmptySession(): ChatSession { return { - id: nanoid(), + id: generateUniqId(), topic: DEFAULT_TOPIC, memoryPrompt: "", messages: [], @@ -640,7 +642,7 @@ export const useChatStore = createPersistStore( 
if (version < 3) { // migrate id to nanoid newState.sessions.forEach((s) => { - s.id = nanoid(); + s.id = generateUniqId(); s.messages.forEach((m) => (m.id = nanoid())); }); } diff --git a/package.json b/package.json index 0eed3bd9b5b..25b1419cb48 100644 --- a/package.json +++ b/package.json @@ -39,6 +39,7 @@ "sass": "^1.59.2", "spark-md5": "^3.0.2", "use-debounce": "^9.0.4", + "uuid": "^9.0.1", "zustand": "^4.3.8" }, "devDependencies": { @@ -48,6 +49,7 @@ "@types/react-dom": "^18.2.7", "@types/react-katex": "^3.0.0", "@types/spark-md5": "^3.0.2", + "@types/uuid": "^9.0.5", "cross-env": "^7.0.3", "eslint": "^8.49.0", "eslint-config-next": "13.4.19", diff --git a/yarn.lock b/yarn.lock index 8adbf7dc24c..bae3320bdb1 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1565,6 +1565,11 @@ resolved "https://registry.npmmirror.com/@types/use-sync-external-store/-/use-sync-external-store-0.0.3.tgz#b6725d5f4af24ace33b36fafd295136e75509f43" integrity sha512-EwmlvuaxPNej9+T4v5AuBPJa2x2UOJVdjCtDHgcDqitUeOtjnJKJ+apYjVcAoBEMjKW1VVFGZLUb5+qqa09XFA== +"@types/uuid@^9.0.5": + version "9.0.5" + resolved "https://registry.yarnpkg.com/@types/uuid/-/uuid-9.0.5.tgz#25a71eb73eba95ac0e559ff3dd018fc08294acf6" + integrity sha512-xfHdwa1FMJ082prjSJpoEI57GZITiQz10r3vEJCHa2khEFQjKy91aWKz6+zybzssCvXUwE1LQWgWVwZ4nYUvHQ== + "@typescript-eslint/parser@^5.4.2 || ^6.0.0": version "6.4.0" resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-6.4.0.tgz#47e7c6e22ff1248e8675d95f488890484de67600" @@ -5937,6 +5942,11 @@ uuid@^9.0.0: resolved "https://registry.npmmirror.com/uuid/-/uuid-9.0.0.tgz#592f550650024a38ceb0c562f2f6aa435761efb5" integrity sha512-MXcSTerfPa4uqyzStbRoTgt5XIe3x5+42+q1sDuy3R5MDk66URdLMOZe5aPX/SQd+kuYAh0FdP/pO28IkQyTeg== +uuid@^9.0.1: + version "9.0.1" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-9.0.1.tgz#e188d4c8853cc722220392c424cd637f32293f30" + integrity sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA== + uvu@^0.5.0: 
version "0.5.6" resolved "https://registry.yarnpkg.com/uvu/-/uvu-0.5.6.tgz#2754ca20bcb0bb59b64e9985e84d2e81058502df" From 2682ba71ac3a6791bb9f6398e489da640d38c2ef Mon Sep 17 00:00:00 2001 From: fengsh Date: Tue, 17 Oct 2023 13:39:05 -0400 Subject: [PATCH 3/3] To communicate with biochatter, disable stream mode and add param session_id --- app/client/platforms/openai.ts | 3 ++- app/masks/en.ts | 2 +- app/store/chat.ts | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/app/client/platforms/openai.ts b/app/client/platforms/openai.ts index fd4eb59ce77..dc8e90b54b5 100644 --- a/app/client/platforms/openai.ts +++ b/app/client/platforms/openai.ts @@ -70,11 +70,12 @@ export class ChatGPTApi implements LLMApi { presence_penalty: modelConfig.presence_penalty, frequency_penalty: modelConfig.frequency_penalty, top_p: modelConfig.top_p, + session_id: useChatStore.getState().currentSession().id, }; console.log("[Request] openai payload: ", requestPayload); - const shouldStream = !!options.config.stream; + const shouldStream = false; // !!options.config.stream; // const controller = new AbortController(); options.onController?.(controller); diff --git a/app/masks/en.ts b/app/masks/en.ts index 7ebccfe24e6..eebe9476aa3 100644 --- a/app/masks/en.ts +++ b/app/masks/en.ts @@ -157,7 +157,7 @@ export const EN_MASKS: BuiltinMask[] = [ }, ], modelConfig: { - model: "gpt-3.5-turbo", + model: "gpt-4", temperature: 0.7, max_tokens: 2000, presence_penalty: 0, diff --git a/app/store/chat.ts b/app/store/chat.ts index d45391b5e72..4713a974385 100644 --- a/app/store/chat.ts +++ b/app/store/chat.ts @@ -576,7 +576,7 @@ export const useChatStore = createPersistStore( ), config: { ...modelConfig, - stream: true, + stream: false, model: getSummarizeModel(session.mask.modelConfig.model), }, onUpdate(message) {