Merge branch 'yodamaster726-main'
ponderingdemocritus committed Nov 23, 2024
2 parents 998e55b + 5166970 commit 986d02e
Showing 10 changed files with 460 additions and 76 deletions.
11 changes: 2 additions & 9 deletions agent/src/index.ts
@@ -32,7 +32,7 @@ import readline from "readline";
 import yargs from "yargs";
 import path from "path";
 import { fileURLToPath } from "url";
-import { character } from "./character.ts";
+import blobert from "./blobert.ts";
 import type { DirectClient } from "@ai16z/client-direct";
 
 const __filename = fileURLToPath(import.meta.url); // get the resolved path to the file
@@ -263,13 +263,6 @@ export function createAgent(
     });
 }
 
-function intializeFsCache(baseDir: string, character: Character) {
-    const cacheDir = path.resolve(baseDir, character.id, "cache");
-
-    const cache = new CacheManager(new FsCacheAdapter(cacheDir));
-    return cache;
-}
-
 function intializeDbCache(character: Character, db: IDatabaseCacheAdapter) {
     const cache = new CacheManager(new DbCacheAdapter(db, character.id));
     return cache;
@@ -317,7 +310,7 @@ const startAgents = async () => {
 
     let charactersArg = args.characters || args.character;
 
-    let characters = [character];
+    let characters = [blobert];
 
     if (charactersArg) {
         characters = await loadCharacters(charactersArg);
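
With the filesystem cache helper removed, the database-backed cache is the only initializer left in this file. A minimal usage sketch (the `db` adapter and its call site are assumed, not shown in this diff):

    // Sketch only: `db` is whatever IDatabaseCacheAdapter the agent already creates at startup.
    const cache = intializeDbCache(blobert, db); // CacheManager backed by DbCacheAdapter, keyed by character.id
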
3 changes: 2 additions & 1 deletion package.json
@@ -47,7 +47,8 @@
     "dependencies": {
         "ollama-ai-provider": "^0.16.1",
         "optional": "^0.1.4",
-        "sharp": "^0.33.5"
+        "sharp": "^0.33.5",
+        "tslog": "^4.9.3"
     },
     "packageManager": "[email protected]+sha512.cce0f9de9c5a7c95bef944169cc5dfe8741abfb145078c0d508b868056848a87c81e626246cb60967cbd7fd29a6c062ef73ff840d96b3c86c40ac92cf4a813ee"
 }
2 changes: 1 addition & 1 deletion packages/core/src/defaultCharacter.ts
@@ -5,7 +5,7 @@ export const defaultCharacter: Character = {
     username: "eliza",
     plugins: [],
     clients: [],
-    modelProvider: ModelProviderName.OPENAI,
+    modelProvider: ModelProviderName.OLLAMA,
     settings: {
         secrets: {},
         voice: {
6 changes: 4 additions & 2 deletions packages/core/src/embedding.ts
@@ -86,8 +86,10 @@ export async function embed(runtime: IAgentRuntime, input: string) {
     // 3. Fallback to OpenAI embedding model
     const embeddingModel = settings.USE_OPENAI_EMBEDDING
         ? "text-embedding-3-small"
-        : modelProvider.model?.[ModelClass.EMBEDDING] ||
-          models[ModelProviderName.OPENAI].model[ModelClass.EMBEDDING];
+        : runtime.character.modelProvider === ModelProviderName.OLLAMA
+          ? settings.OLLAMA_EMBEDDING_MODEL || "mxbai-embed-large"
+          : modelProvider.model?.[ModelClass.EMBEDDING] ||
+            models[ModelProviderName.OPENAI].model[ModelClass.EMBEDDING];
 
     if (!embeddingModel) {
         throw new Error("No embedding model configured");
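
The nested ternary added here is easier to follow as an equivalent if/else chain (an illustrative rewrite using the same identifiers, not code from the commit):

    // Equivalent logic to the updated fallback: explicit OpenAI setting first,
    // then a local Ollama embedding model, then the provider's own embedding model,
    // then OpenAI's default embedding model.
    let embeddingModel: string;
    if (settings.USE_OPENAI_EMBEDDING) {
        embeddingModel = "text-embedding-3-small";
    } else if (runtime.character.modelProvider === ModelProviderName.OLLAMA) {
        embeddingModel = settings.OLLAMA_EMBEDDING_MODEL || "mxbai-embed-large";
    } else {
        embeddingModel =
            modelProvider.model?.[ModelClass.EMBEDDING] ||
            models[ModelProviderName.OPENAI].model[ModelClass.EMBEDDING];
    }
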
11 changes: 9 additions & 2 deletions packages/core/src/generation.ts
@@ -62,7 +62,12 @@ export async function generateText({
         return "";
     }
 
-    elizaLogger.log("Genarating text...");
+    elizaLogger.log("Generating text...");
+
+    elizaLogger.info("Generating text with options:", {
+        modelProvider: runtime.modelProvider,
+        model: modelClass,
+    });
 
     const provider = runtime.modelProvider;
     const endpoint =
@@ -84,6 +89,8 @@ export async function generateText({
         model = runtime.getSetting("LLAMACLOUD_MODEL_SMALL");
     }
 
+    elizaLogger.info("Selected model:", model);
+
     const temperature = models[provider].settings.temperature;
     const frequency_penalty = models[provider].settings.frequency_penalty;
     const presence_penalty = models[provider].settings.presence_penalty;
@@ -709,7 +716,7 @@ export async function generateMessageResponse({
     let retryLength = 1000; // exponential backoff
     while (true) {
         try {
-            elizaLogger.log("Genarating message response..");
+            elizaLogger.log("Generating message response..");
 
             const response = await generateText({
                 runtime,
45 changes: 34 additions & 11 deletions packages/core/src/logger.ts
@@ -1,4 +1,11 @@
-class ElizaLogger {
+import settings from "./settings.ts";
+import { Logger } from "tslog";
+
+interface IElizaLogger extends Logger<IElizaLogger> {
+    progress(message: string): void;
+}
+
+class ElizaLogger implements IElizaLogger {
     constructor() {
         // Check if we're in Node.js environment
         this.isNode =
@@ -7,7 +14,7 @@ class ElizaLogger {
             process.versions.node != null;
 
         // Set verbose based on environment
-        this.verbose = this.isNode ? process.env.verbose === "true" : false;
+        this.verbose = this.isNode ? settings.VERBOSE === "true" : false;
     }
 
     private isNode: boolean;
@@ -173,6 +180,7 @@ class ElizaLogger {
         }
     }
 
+    // @ts-expect-error- custom implementation
     log(...strings) {
         this.#logWithStyle(strings, {
             fg: "white",
@@ -182,6 +190,7 @@ class ElizaLogger {
         });
     }
 
+    // @ts-expect-error- custom implementation
     warn(...strings) {
         this.#logWithStyle(strings, {
             fg: "yellow",
@@ -191,6 +200,7 @@ class ElizaLogger {
         });
     }
 
+    // @ts-expect-error- custom implementation
     error(...strings) {
         this.#logWithStyle(strings, {
             fg: "red",
@@ -200,6 +210,7 @@ class ElizaLogger {
         });
     }
 
+    // @ts-expect-error- custom implementation
     info(...strings) {
         this.#logWithStyle(strings, {
             fg: "blue",
@@ -209,15 +220,7 @@ class ElizaLogger {
         });
     }
 
-    success(...strings) {
-        this.#logWithStyle(strings, {
-            fg: "green",
-            bg: "",
-            icon: "\u2713",
-            groupTitle: ` ${this.successesTitle}`,
-        });
-    }
-
+    // @ts-expect-error- custom implementation
    debug(...strings) {
         if (!this.verbose) return;
         this.#logWithStyle(strings, {
@@ -228,6 +231,15 @@ class ElizaLogger {
         });
     }
 
+    success(...strings) {
+        this.#logWithStyle(strings, {
+            fg: "green",
+            bg: "",
+            icon: "\u2713",
+            groupTitle: ` ${this.successesTitle}`,
+        });
+    }
+
     assert(...strings) {
         this.#logWithStyle(strings, {
             fg: "cyan",
@@ -236,6 +248,17 @@ class ElizaLogger {
             groupTitle: ` ${this.assertsTitle}`,
         });
     }
+
+    progress(message: string) {
+        if (this.isNode) {
+            // Clear the current line and move cursor to beginning
+            process.stdout.clearLine(0);
+            process.stdout.cursorTo(0);
+            process.stdout.write(message);
+        } else {
+            console.log(message);
+        }
+    }
 }
 
 export const elizaLogger = new ElizaLogger();
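
The new progress() method overwrites the current terminal line instead of appending a new one, so repeated calls behave like an in-place status indicator. A hypothetical usage sketch (the loop is illustrative, not part of the commit):

    // Hypothetical caller: each call clears the line and rewrites it in place (Node only).
    for (let pct = 0; pct <= 100; pct += 5) {
        elizaLogger.progress(`Downloading model: ${pct}%`);
    }
    elizaLogger.success("Download complete"); // switch back to normal logging once finished
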
30 changes: 30 additions & 0 deletions packages/core/src/runtime.ts
@@ -176,7 +176,9 @@ export class AgentRuntime implements IAgentRuntime {
             return;
         }
 
+        // Add the service to the services map
         this.services.set(serviceType, service);
+        elizaLogger.success(`Service ${serviceType} registered successfully`);
     }
 
     /**
@@ -217,6 +219,12 @@ export class AgentRuntime implements IAgentRuntime {
         cacheManager: ICacheManager;
         logging?: boolean;
     }) {
+        elizaLogger.info("Initializing AgentRuntime with options:", {
+            character: opts.character?.name,
+            modelProvider: opts.modelProvider,
+            characterModelProvider: opts.character?.modelProvider,
+        });
+
         this.#conversationLength =
             opts.conversationLength ?? this.#conversationLength;
         this.databaseAdapter = opts.databaseAdapter;
@@ -280,10 +288,32 @@ export class AgentRuntime implements IAgentRuntime {
         });
 
         this.serverUrl = opts.serverUrl ?? this.serverUrl;
+
+        elizaLogger.info("Setting model provider...");
+        elizaLogger.info(
+            "- Character model provider:",
+            this.character.modelProvider
+        );
+        elizaLogger.info("- Opts model provider:", opts.modelProvider);
+        elizaLogger.info("- Current model provider:", this.modelProvider);
+
         this.modelProvider =
             this.character.modelProvider ??
             opts.modelProvider ??
             this.modelProvider;
+
+        elizaLogger.info("Selected model provider:", this.modelProvider);
+
+        // Validate model provider
+        if (!Object.values(ModelProviderName).includes(this.modelProvider)) {
+            elizaLogger.error("Invalid model provider:", this.modelProvider);
+            elizaLogger.error(
+                "Available providers:",
+                Object.values(ModelProviderName)
+            );
+            throw new Error(`Invalid model provider: ${this.modelProvider}`);
+        }
+
         if (!this.serverUrl) {
             elizaLogger.warn("No serverUrl provided, defaulting to localhost");
         }
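
The resolution order introduced here means a character's own modelProvider wins over the constructor option, which in turn wins over the runtime's previous value, and the result must be a known ModelProviderName. A condensed restatement (variable names are illustrative, not from the commit):

    // Illustrative restatement of the new precedence and validation.
    const resolved =
        character.modelProvider ??   // e.g. ModelProviderName.OLLAMA (as in defaultCharacter.ts above)
        opts.modelProvider ??        // provider passed to the constructor
        previousProvider;            // whatever the runtime already had
    if (!Object.values(ModelProviderName).includes(resolved)) {
        throw new Error(`Invalid model provider: ${resolved}`);
    }
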
17 changes: 14 additions & 3 deletions packages/plugin-node/src/services/image.ts
@@ -63,16 +63,23 @@ export class ImageDescriptionService
         env.backends.onnx.wasm.proxy = false;
         env.backends.onnx.wasm.numThreads = 1;
 
-        elizaLogger.log("Downloading Florence model...");
+        elizaLogger.info("Downloading Florence model...");
 
         this.model = await Florence2ForConditionalGeneration.from_pretrained(
             this.modelId,
             {
                 device: "gpu",
                 progress_callback: (progress) => {
                     if (progress.status === "downloading") {
-                        elizaLogger.log(
-                            `Model download progress: ${JSON.stringify(progress)}`
+                        const percent = (
+                            (progress.loaded / progress.total) *
+                            100
+                        ).toFixed(1);
+                        const dots = ".".repeat(
+                            Math.floor(Number(percent) / 5)
+                        );
+                        elizaLogger.info(
+                            `Downloading Florence model: [${dots.padEnd(20, " ")}] ${percent}%`
                         );
                     }
                 },
@@ -81,10 +88,14 @@ export class ImageDescriptionService
 
         elizaLogger.success("Florence model downloaded successfully");
+
+        elizaLogger.info("Downloading processor...");
         this.processor = (await AutoProcessor.from_pretrained(
             this.modelId
         )) as Florence2Processor;
 
+        elizaLogger.info("Downloading tokenizer...");
         this.tokenizer = await AutoTokenizer.from_pretrained(this.modelId);
+        elizaLogger.success("Image service initialization complete");
     }
 
     async describeImage(
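
The progress line above is derived from the callback's loaded/total counters: the percentage is loaded / total × 100 to one decimal place, and one dot is drawn per 5%, padded to a fixed width of 20. A worked example with illustrative numbers:

    // Illustrative values: 750 of 1000 bytes received.
    const percent = ((750 / 1000) * 100).toFixed(1);           // "75.0"
    const dots = ".".repeat(Math.floor(Number(percent) / 5));  // 15 dots
    // Rendered line: "Downloading Florence model: [...............     ] 75.0%"
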
402 changes: 355 additions & 47 deletions packages/plugin-node/src/services/llama.ts

Large diffs are not rendered by default.

9 changes: 9 additions & 0 deletions pnpm-lock.yaml
