From a8468db7ea6eb2cbb72ccd3f8f13a6e162c780ef Mon Sep 17 00:00:00 2001 From: Huan He <140455389+huanhe4096@users.noreply.github.com> Date: Mon, 25 Nov 2024 14:03:33 -0500 Subject: [PATCH] update 0.7.0 for model settings --- README.md | 5 +++++ web/src/DataStore.js | 28 ++++++++++++++++++++++++---- web/src/components/AIHelper.vue | 2 -- web/src/components/SettingPanel.vue | 18 ++++++++++++++++++ web/src/utils/ai_helper.js | 14 +++++++++++++- 5 files changed, 60 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index b8bae40..d259ff7 100644 --- a/README.md +++ b/README.md @@ -25,6 +25,11 @@ The translate, llama model, and other services can be started separately. Please # Updates +## 0.7.0 + +- Add "temperature slider" for AI model +- Add "system role" prompt for AI model + ## 0.6.8 - Add try-catch for importing JSON config diff --git a/web/src/DataStore.js b/web/src/DataStore.js index 13687ab..2fffb66 100644 --- a/web/src/DataStore.js +++ b/web/src/DataStore.js @@ -3,7 +3,7 @@ import { useToast } from "primevue/usetoast"; export const useDataStore = defineStore('jarvis', { state: () => ({ - version: '0.6.8', + version: '0.7.0', config: { api_server_url: "http://localhost:8123", api_server_token: "", @@ -34,7 +34,9 @@ state: () => ({ "model_name": "gpt-4o", "endpoint": "https://api.openai.com/v1/chat/completions", "enabled": true, - "api_key": "" + "api_key": "", + "temperature": 0, + "system_prompt": "You are a helpful assistant.", }, claude: { "id": "claude", @@ -43,7 +45,9 @@ state: () => ({ "model_name": "claude-3-5-haiku-20241022", "endpoint": "https://api.anthropic.com/v1/messages", "enabled": true, - "api_key": "" + "api_key": "", + "temperature": 0, + "system_prompt": "You are a helpful assistant.", }, // llama: { // "id": "llama", @@ -433,7 +437,23 @@ actions: { // copy the items from json to store.config for (let key in this.config) { if (json.hasOwnProperty(key)) { - this.config[key] = json[key]; + // special rule for ai models + if (key == 
'ai_models') { + // for this case, search all settings from the json + for (let model_id in json[key]) { + if (this.config[key].hasOwnProperty(model_id)) { + for (let model_attribute in json[key][model_id]) { + this.config[key][model_id][model_attribute] = json[key][model_id][model_attribute]; + } + } else { + // just copy the whole content if not found + // which means the localStorage has customized settings + this.config[key][model_id] = json[key][model_id]; + } + } + } else { + this.config[key] = json[key]; + } } } }, diff --git a/web/src/components/AIHelper.vue b/web/src/components/AIHelper.vue index 5bcec65..36de5a8 100644 --- a/web/src/components/AIHelper.vue +++ b/web/src/components/AIHelper.vue @@ -26,8 +26,6 @@ async function onClickReview(model_id) { // set flag status.value[model_id] = 'reviewing'; - console.log(`* AI Helper [${model_id}] is thinking ...`); - // first generate the prompot let question = ai_helper.generateQuestionFromTemplate( store.llm_prompt_template, diff --git a/web/src/components/SettingPanel.vue b/web/src/components/SettingPanel.vue index c8ceb0b..8729b69 100644 --- a/web/src/components/SettingPanel.vue +++ b/web/src/components/SettingPanel.vue @@ -257,6 +257,24 @@ const toggle = (event) => { + +
+ Temperature (0-1) +
+
+ +
+ +
+ System Prompt +
+
+ +
diff --git a/web/src/utils/ai_helper.js b/web/src/utils/ai_helper.js index 594374a..dd5a9cf 100644 --- a/web/src/utils/ai_helper.js +++ b/web/src/utils/ai_helper.js @@ -72,6 +72,7 @@ export const ai_helper = { }, _ask_openai: async function(question, config) { + console.log(`* asking openai ...`); // e.g., "endpoint": "https://api.openai.com/v1/chat/completions", let endpoint = config.endpoint; @@ -95,13 +96,14 @@ export const ai_helper = { body: JSON.stringify({ "model": model_name, "format": "json", + "temperature": config.temperature, "response_format": { "type": "json_object", }, "messages": [ { "role": "system", - "content": "You are a helpful assistant." + "content": config.system_prompt }, { "role": "user", @@ -131,9 +133,11 @@ export const ai_helper = { }, _ask_gemini: async function(question, config) { + console.log(`* asking gemini ...`); }, _ask_claude: async function(question, config) { + console.log(`* asking claude ...`); let endpoint = config.endpoint; // e.g., "model_name": "gpt-4o-mini", @@ -153,6 +157,7 @@ export const ai_helper = { body: JSON.stringify({ model: model_name, max_tokens: 4096, + temperature: config.temperature, messages: [ { role: "user", @@ -163,6 +168,13 @@ export const ai_helper = { }, ], }, + { + role: "assistant", + content: [{ + type: "text", + text: config.system_prompt + }] + }, // { // "role": "assistant", // "content": [{