Add Ollama Config
KTheMan committed Jan 25, 2025
1 parent e66d337 commit 47be418
Showing 8 changed files with 116 additions and 7 deletions.
4 changes: 3 additions & 1 deletion README.md
@@ -8,7 +8,7 @@ UnKenny is a module for [Foundry Virtual Tabletop](https://foundryvtt.com/). It

__Be mindful which modules you enable.__

If you are storing your OpenAI API key in UnKenny, you are entrusting it to us, and by extension, to FoundryVTT. The modularity of Foundry is one of its big selling points, but it is also an invitation to arbitrary code execution. Any module you install could, in principle, inject some code that steals your API key. Foundry takes some countermeasures like scoping of modules, but ultimately it is very hard to defend against attacks from within your application.
If you are storing your OpenAI or Ollama API key in UnKenny, you are entrusting it to us, and by extension, to FoundryVTT. The modularity of Foundry is one of its big selling points, but it is also an invitation to arbitrary code execution. Any module you install could, in principle, inject some code that steals your API key. Foundry takes some countermeasures like scoping of modules, but ultimately it is very hard to defend against attacks from within your application.

For that reason, monitor your API key usage every now and then. If ever you find a module that has stolen your key, please
* [Revoke your key.](https://help.openai.com/en/articles/4936817-i-see-suspicious-activity-on-my-account-what-do-i-do)
@@ -21,6 +21,8 @@ For that reason, monitor your API key usage every now and then. If ever you find

After enabling the module for a world, configure it in the global settings. Selecting a [Large Language Model](https://en.wikipedia.org/wiki/Large_language_model) is required. If you want to use one of OpenAI's models, you will also need to [set up and pay for an API key](https://blog.streamlit.io/beginners-guide-to-openai-api/). Note that different models may have different capabilities and usage costs.

If you want to use Ollama models, configure the Ollama endpoint and API key. After configuring these, click the "Fetch Ollama Models" button to populate the model list with the models available on your server.
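For a local Ollama installation, the endpoint is typically `http://localhost:11434` (Ollama's default port). A plain local Ollama server does not require an API key, so that field can usually stay empty; it is only needed if your endpoint sits behind an authenticating proxy.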

The other parameters are set to reasonable defaults.

![UnKenny Game Settings](https://raw.githubusercontent.com/thecomamba/unkenny/main/img/game_settings.png)
14 changes: 14 additions & 0 deletions src/apps/unkenny-sheet.hbs
@@ -61,6 +61,20 @@
</select>
</div>

<div class='form-field'>
<label>{{localize "unkenny.settings.ollamaEndpoint"}}</label>
<input name="ollamaEndpoint" type="text" value="{{ollamaEndpoint}}">
</div>

<div class='form-field'>
<label>{{localize "unkenny.settings.ollamaApiKey"}}</label>
<input name="ollamaApiKey" type="text" value="{{ollamaApiKey}}">
</div>

<button type="button" onclick="this.closest('.document-sheet').fetchAndSetOllamaModels()">
{{localize "unkenny.sheet.fetchOllamaModels"}}
</button>

<button type="submit">
<i class="fa fa-check"></i>
{{localize "unkenny.sheet.save"}}
20 changes: 19 additions & 1 deletion src/apps/unkenny-sheet.js
@@ -1,7 +1,8 @@

import { findActorWithAlias } from "../scripts/chat-message-request.js";
import { getModelToTextMap, updateModelsWithOllama } from "../scripts/models.js";
import { PREFIX_OPTIONS } from "../scripts/prefix.js";
import { fetchOllamaModels } from "../scripts/shared.js";

class UnKennySheet extends DocumentSheet {
constructor(actor) {
@@ -23,6 +24,9 @@ class UnKennySheet extends DocumentSheet {
await this.initContextWithActorData();
}

this.context.ollamaEndpoint = game.settings.get("unkenny", "ollamaEndpoint");
this.context.ollamaApiKey = game.settings.get("unkenny", "ollamaApiKey");

return this.context;
}

@@ -63,6 +67,10 @@ class UnKennySheet extends DocumentSheet {
let prefix = event.target.value;
this.setContextPrefix(prefix);
}
if (event.target.name == "ollamaEndpoint" || event.target.name == "ollamaApiKey") {
this.context.ollamaEndpoint = event.target.value;
this.context.ollamaApiKey = event.target.value;
}
}

setContextModel(model) {
@@ -99,6 +107,9 @@
await this.updateFlag(formData, "prefix");
}

await this.object.setFlag("unkenny", "ollamaEndpoint", formData.ollamaEndpoint);
await this.object.setFlag("unkenny", "ollamaApiKey", formData.ollamaApiKey);

const actor = await findActorWithAlias(formData.alias);
if (!actor) {
ui.notifications.error(game.i18n.localize("unkenny.sheet.settingAliasFailed"));
@@ -113,6 +124,13 @@
await this.object.unsetFlag("unkenny", key);
}
}

async fetchAndSetOllamaModels() {
const models = await fetchOllamaModels(this.context.ollamaEndpoint, this.context.ollamaApiKey);
await updateModelsWithOllama(models);
this.initModels();
this.render();
}
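/* Since the template button no longer carries an inline onclick handler (a
   DOM element has no reference back to the sheet instance), the sheet has to
   bind the click itself. A minimal sketch, assuming Foundry's standard
   activateListeners(html) hook with a jQuery-wrapped html argument: */
activateListeners(html) {
    super.activateListeners(html);
    html.find(".fetch-ollama-models").on("click", () => this.fetchAndSetOllamaModels());
}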
}

export { UnKennySheet };
4 changes: 3 additions & 1 deletion src/scripts/llm.js
@@ -1,6 +1,6 @@
import { collectChatMessages } from "./collecting-chat-messages.js";
import { getResponseFromLocalLLM } from "../scripts/local-llm.js";
import { getResponseFromOpenAI } from "../scripts/openai-api.js";
import { getResponseFromOpenAI, getResponseFromOllama } from "../scripts/openai-api.js";
import { isLocal } from "./models.js";
import { llmParametersAndDefaults } from "./settings.js";

@@ -46,6 +46,8 @@ async function generateResponse(actor, input, parameters) {
let response;
if (isLocal(parameters.model)) {
response = await getResponseFromLocalLLM(parameters, messages);
} else if (parameters.model.startsWith("Ollama:")) {
response = await getResponseFromOllama(parameters, messages);
} else {
response = await getResponseFromOpenAI(parameters, messages);
}
8 changes: 7 additions & 1 deletion src/scripts/models.js
@@ -47,4 +47,10 @@ function getTokenLimit(model) {
return foundModel ? foundModel.limit : undefined;
}

export { getModelToTextMap, getLocalModels, getOpenAiModels, getTokenLimit, isLocal };
function updateModelsWithOllama(ollamaModels) {
    // Nothing here is awaited, so the function need not be async.
    // Key each entry with an "Ollama:" prefix so the startsWith("Ollama:")
    // dispatch in llm.js can recognize these models; the original keyed by the
    // bare model.id, which that check would never match. This assumes
    // MODELS_MAP is the registry the model dropdown and getTokenLimit read.
    ollamaModels.forEach(model => {
        MODELS_MAP.set(`Ollama:${model.id}`, { text: `Ollama: ${model.name}`, limit: model.token_limit });
    });
}

export { getModelToTextMap, getLocalModels, getOpenAiModels, getTokenLimit, isLocal, updateModelsWithOllama };
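For illustration, a hypothetical call; the id/name/token_limit fields mirror the shape fetchOllamaModels returns, not a documented Ollama payload:

const models = [{ id: "llama3:latest", name: "llama3:latest", token_limit: 8192 }];
updateModelsWithOllama(models);
// getTokenLimit("Ollama:llama3:latest") should now resolve to 8192,
// assuming getTokenLimit reads from the same MODELS_MAP registry.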
30 changes: 29 additions & 1 deletion src/scripts/openai-api.js
@@ -39,4 +39,32 @@ async function getResponseFromOpenAI(parameters, messages) {
}
}

export { getResponseFromOpenAI, roughNumberOfTokensForOpenAi };
async function getResponseFromOllama(parameters, messages) {
    try {
        // The response is parsed OpenAI-style below, so this targets Ollama's
        // OpenAI-compatible chat route (/v1/chat/completions) rather than the
        // native /api/generate route, which returns a different shape. The
        // Authorization header only matters behind an authenticating proxy.
        const response = await fetch(`${parameters.ollamaEndpoint}/v1/chat/completions`, {
            method: 'POST',
            headers: {
                'Authorization': `Bearer ${parameters.ollamaApiKey}`,
                'Content-Type': 'application/json'
            },
            body: JSON.stringify({
                // Strip the "Ollama:" registry prefix before sending the id upstream.
                model: parameters.model.replace(/^Ollama:/, ""),
                messages: messages,
                max_tokens: parameters.maxNewTokens,
                temperature: parameters.temperature,
                frequency_penalty: parameters.repetitionPenalty
            })
        });
        if (!response.ok) {
            throw new Error(`Failed to generate response: ${response.statusText}`);
        }
        const data = await response.json();
        return data.choices[0].message.content;
    } catch (error) {
        const errorMessage = game.i18n.format('unkenny.llm.ollamaError', { error: error.message });
        ui.notifications.error(errorMessage);
        return;
    }
}

export { getResponseFromOpenAI, roughNumberOfTokensForOpenAi, getResponseFromOllama };
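A hedged usage sketch with illustrative values; the real parameters come from llmParametersAndDefaults() in settings.js plus the per-actor flags:

const reply = await getResponseFromOllama({
    model: "Ollama:llama3:latest", // id as registered by updateModelsWithOllama
    ollamaEndpoint: "http://localhost:11434",
    ollamaApiKey: "",
    maxNewTokens: 250,
    temperature: 1.0,
    repetitionPenalty: 0.0
}, [{ role: "user", content: "Say hello in character." }]);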
22 changes: 21 additions & 1 deletion src/scripts/settings.js
@@ -9,7 +9,9 @@ function llmParametersAndDefaults() {
maxNewTokens: 250,
repetitionPenalty: 0.0,
temperature: 1.0,
prefix: ""
prefix: "",
ollamaEndpoint: "",
ollamaApiKey: ""
};
}

@@ -100,6 +102,24 @@ function registerGameParameters() {
choices: PREFIX_OPTIONS,
default: params.prefix
});

game.settings.register("unkenny", "ollamaEndpoint", {
name: game.i18n.localize("unkenny.settings.ollamaEndpoint"),
hint: game.i18n.localize("unkenny.settings.ollamaEndpointDescription"),
scope: "world",
config: true,
type: String,
default: params.ollamaEndpoint
});

game.settings.register("unkenny", "ollamaApiKey", {
name: game.i18n.localize("unkenny.settings.ollamaApiKey"),
hint: game.i18n.localize("unkenny.settings.ollamaApiKeyDescription"),
scope: "world",
config: true,
type: String,
default: params.ollamaApiKey
});
}

export { llmParametersAndDefaults, registerGameParameters };
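As a sketch of how these settings might feed a request, assuming the module merges them into the parameter object that getResponseFromOllama receives:

const params = {
    ...llmParametersAndDefaults(),
    ollamaEndpoint: game.settings.get("unkenny", "ollamaEndpoint"),
    ollamaApiKey: game.settings.get("unkenny", "ollamaApiKey")
};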
21 changes: 20 additions & 1 deletion src/scripts/shared.js
@@ -45,4 +45,23 @@ async function loadExternalModule(name) {
}
}

export { isUnKenny, loadExternalModule };
async function fetchOllamaModels(endpoint, apiKey) {
    try {
        // Ollama's native model listing lives at /api/tags and returns
        // { models: [{ name, model, ... }] }; the original /models path matches
        // no stock Ollama route. The Authorization header only matters if the
        // endpoint sits behind an authenticating proxy; plain Ollama ignores it.
        const response = await fetch(`${endpoint}/api/tags`, {
            headers: {
                'Authorization': `Bearer ${apiKey}`
            }
        });
        if (!response.ok) {
            throw new Error(`Failed to fetch models: ${response.statusText}`);
        }
        const data = await response.json();
        // Map to the { id, name, token_limit } shape that updateModelsWithOllama
        // consumes. /api/tags does not report a context length, so token_limit
        // stays undefined here.
        return data.models.map(m => ({ id: m.model ?? m.name, name: m.name, token_limit: undefined }));
    } catch (error) {
        const errorMessage = game.i18n.format("unkenny.shared.ollamaFetchFailed", { error: error.message });
        ui.notifications.error(errorMessage);
        return [];
    }
}

export { isUnKenny, loadExternalModule, fetchOllamaModels };
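A hedged end-to-end sketch against a local server (default port 11434; the empty key reflects that plain Ollama needs none), with updateModelsWithOllama imported from models.js:

const models = await fetchOllamaModels("http://localhost:11434", "");
// e.g. [{ id: "llama3:latest", name: "llama3:latest", token_limit: undefined }]
updateModelsWithOllama(models);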
