+
Settings
-
-
-
-
+ class="mr-2"
+ severity="secondary" />
-
-
-
+ severity="secondary"
+ icon="pi pi-ellipsis-v"
+ @click="toggle" aria-haspopup="true"
+ aria-controls="overlay_menu" />
+
+
@@ -97,19 +130,12 @@ function onClickLoad() {
General
- Backend
+
Chatbot
Other
-
-
-
@@ -126,12 +152,12 @@ function onClickLoad() {
class="w-100"/>
-
+
API Key
+ class="w-full"/>
@@ -142,18 +168,35 @@ function onClickLoad() {
Hightlight Keywords
-
-
-
-
+
+
+
@@ -215,4 +258,14 @@ function onClickLoad() {
.label {
font-size: small;
}
+.keyword {
+ padding: 0.2rem 0;
+ display: flex;
+ flex-direction: row;
+ justify-content: space-between;
+}
+.keyword:hover {
+ background-color: #f8f8f8;
+ font-weight: bold;
+}
\ No newline at end of file
diff --git a/web/src/main.js b/web/src/main.js
index 03e9b12..6eaabc7 100644
--- a/web/src/main.js
+++ b/web/src/main.js
@@ -7,8 +7,11 @@ import Aura from '@primevue/themes/aura';
import App from './App.vue';
import { createPinia } from 'pinia';
import { useDataStore } from "./DataStore";
+import { ai_helper } from "./utils/ai_helper";
import ToastService from 'primevue/toastservice';
+window.ai_helper = ai_helper;
+
const pinia = createPinia()
const app = createApp(App);
@@ -24,8 +27,4 @@ app.use(ToastService);
const store = useDataStore();
window.store = store;
-// load the data from the local storage
-store.loadSettingsFromLocalStorage();
-
-
app.mount('#app');
diff --git a/web/src/utils/ai_helper.js b/web/src/utils/ai_helper.js
index d5a9d4d..c375632 100644
--- a/web/src/utils/ai_helper.js
+++ b/web/src/utils/ai_helper.js
@@ -61,6 +61,13 @@ export const ai_helper = {
config
);
}
+
+ if (config.service_type == 'claude') {
+ return await this._ask_claude(
+ question,
+ config
+ );
+ }
},
_ask_openai: async function(question, config) {
@@ -91,14 +98,14 @@ export const ai_helper = {
"type": "json_object",
},
"messages": [
- {
- "role": "system",
- "content": "You are a helpful assistant."
- },
- {
- "role": "user",
- "content": question
- }
+ {
+ "role": "system",
+ "content": "You are a helpful assistant."
+ },
+ {
+ "role": "user",
+ "content": question
+ }
]
})
}
@@ -106,10 +113,9 @@ export const ai_helper = {
const data = await rsp.json();
- console.log(data);
-
let s = data.choices[0].message.content;
let result = JSON.parse(s);
+ console.log("* openai result: ", result);
// maybe format the response here before return
let ret = {
@@ -124,6 +130,61 @@ export const ai_helper = {
},
_ask_claude: async function(question, config) {
+ let endpoint = config.endpoint;
+
+ // e.g., "model_name": "claude-3-5-sonnet-20241022",
+ let model_name = config.model_name;
+
+ // customize header
+ let headers = {
+ "anthropic-version": "2023-06-01",
+ "content-type": "application/json",
+ "anthropic-dangerous-direct-browser-access": "true",
+ "x-api-key": config.api_key,
+ };
+
+ const rsp = await fetch(endpoint, {
+ method: "POST",
+ headers: headers,
+ body: JSON.stringify({
+ model: model_name,
+ max_tokens: 1024,
+ messages: [
+ {
+ role: "user",
+ content: [
+ {
+ type: "text",
+ text: question
+ },
+ ],
+ },
+ // {
+ // "role": "assistant",
+ // "content": [{
+ // "type": "text",
+ // "text": "Here is the JSON requested:\n{"
+ // }]
+ // }
+ ],
+ }),
+ });
+
+ const data = await rsp.json();
+
+ console.log(data)
+
+ let s = data.content[0].text;
+ let result = JSON.parse(s);
+ // let result = JSON.parse("{" + s + "}");
+ console.log("* claude result: ", result);
+
+ // maybe format the response here before return
+ let ret = {
+ reason: result['reason'],
+ answer: result['category']
+ };
+ return ret;
},
_ask_ollama: async function(question, config) {