Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fixed lib path, now can use custom anthropic client #31

Merged
merged 2 commits into from
Feb 9, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,8 @@ export interface AiCoder {
}
```

*Note: The `createAiCoder` function now accepts an optional `anthropicClient` parameter to override the default Anthropic client. This allows you to provide a custom client (for example, one configured with a different API key) when using AiCoder in environments such as the browser.*

The AI Coder supports streaming of AI responses and notifying you when the virtual file system (VFS) is updated. To receive these updates, subscribe to events on the AiCoder instance returned by `createAiCoder`:
- **streamedChunk**: Emitted with each chunk streamed from the AI. This is useful for logging or updating a UI with gradual progress.
- **vfsChanged**: Emitted whenever the VFS is updated with new content. This is useful for refreshing a file view or triggering further processing.
Expand Down
Binary file modified bun.lockb
Binary file not shown.
48 changes: 28 additions & 20 deletions lib/ai/aiCoder.ts
Original file line number Diff line number Diff line change
@@ -1,29 +1,38 @@
import { EventEmitter } from "node:events"
import type Anthropic from "@anthropic-ai/sdk"
import { runAiWithErrorCorrection } from "./run-ai-with-error-correction"
import { createLocalCircuitPrompt } from "lib/prompt-templates/create-local-circuit-prompt"

/** Payload types for each event an AiCoder instance emits via `on(...)`. */
export interface AiCoderEvents {
  /** A chunk of streamed AI output (also used for progress/status lines). */
  streamedChunk: string
  /** Emitted with no payload whenever the virtual file system is updated. */
  vfsChanged: undefined
}

/**
 * Public contract for the AI circuit coder. Notifications are delivered via
 * EventEmitter-style events (see {@link AiCoderEvents}); the legacy callback
 * properties remain only for backward compatibility.
 */
export interface AiCoder {
  /**
   * @deprecated Subscribe with `on("streamedChunk", ...)` instead. The
   * EventEmitter-based implementation no longer assigns this property, so it
   * must be optional or `AiCoderImpl implements AiCoder` fails to type-check.
   */
  onStreamedChunk?: (chunk: string) => void
  /**
   * @deprecated Subscribe with `on("vfsChanged", ...)` instead. Optional for
   * the same reason as `onStreamedChunk`.
   */
  onVfsChanged?: () => void
  /** Virtual file system: maps generated file paths to file contents. */
  vfs: { [filepath: string]: string }
  /** Selectable generation options (e.g. target microcontroller). */
  availableOptions: { name: string; options: string[] }[]
  /** Submits a prompt; resolves once generation (including retries) finishes. */
  submitPrompt: (
    prompt: string,
    options?: { selectedMicrocontroller?: string },
  ) => Promise<void>
  /** Typed event subscription for "streamedChunk" / "vfsChanged". */
  on<K extends keyof AiCoderEvents>(
    event: K,
    listener: (payload: AiCoderEvents[K]) => void,
  ): this
}

export class AiCoderImpl implements AiCoder {
onStreamedChunk: (chunk: string) => void
onVfsChanged: () => void
export class AiCoderImpl extends EventEmitter implements AiCoder {
vfs: { [filepath: string]: string } = {}
availableOptions = [{ name: "microController", options: ["pico", "esp32"] }]
anthropicClient: Anthropic | undefined

constructor(
onStreamedChunk: (chunk: string) => void,
onVfsChanged: () => void,
) {
this.onStreamedChunk = onStreamedChunk
this.onVfsChanged = onVfsChanged
constructor({
anthropicClient,
}: {
anthropicClient?: Anthropic
}) {
super()
this.anthropicClient = anthropicClient
}

async submitPrompt(
Expand All @@ -40,28 +49,27 @@ export class AiCoderImpl implements AiCoder {
promptNumber,
maxAttempts: 4,
previousAttempts: [],
onVfsChanged: this.onVfsChanged,
onStream: (chunk: string) => {
if (!streamStarted) {
this.onStreamedChunk("Creating a tscircuit local circuit...")
this.emit("streamedChunk", "Creating a tscircuit local circuit...")
streamStarted = true
}
currentAttempt += chunk
this.onStreamedChunk(chunk)
this.emit("streamedChunk", chunk)
},
onVfsChanged: () => {
this.emit("vfsChanged")
},
vfs: this.vfs,
})
if (result.code) {
const filepath = `prompt-${promptNumber}-attempt-final.tsx`
this.vfs[filepath] = result.code
this.onVfsChanged()
this.emit("vfsChanged")
}
}
}

export const createAiCoder = (
onStreamedChunk: (chunk: string) => void,
onVfsChanged: () => void,
): AiCoder => {
return new AiCoderImpl(onStreamedChunk, onVfsChanged)
/**
 * Create an AiCoder. Pass a custom Anthropic client (for example, one
 * constructed in the browser with a user-supplied API key) to override the
 * default client; omit it to use the shared default.
 * NOTE(review): verify that `submitPrompt` actually forwards
 * `this.anthropicClient` to `runAiWithErrorCorrection` — the visible call in
 * this diff does not pass it, in which case the override would be ignored.
 */
export const createAiCoder = (anthropicClient?: Anthropic): AiCoder => {
  return new AiCoderImpl({ anthropicClient })
}
15 changes: 12 additions & 3 deletions lib/ai/anthropic.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,16 @@
import Anthropic from "@anthropic-ai/sdk"
import dotenv from "dotenv"
dotenv.config()

// Resolve the Anthropic API key from process.env when running under Node;
// stays "" in environments without `process` (e.g. the browser), where a
// custom client is expected to be supplied instead.
let apiKey = ""
if (
  typeof process !== "undefined" &&
  process.env &&
  process.env.ANTHROPIC_API_KEY
) {
  // NOTE(review): dotenv.config() is gated on ANTHROPIC_API_KEY already being
  // set, so a key defined only in a .env file is never loaded — and because
  // the dynamic import is not awaited, config() runs after `apiKey` is read
  // below anyway. Load (and await) dotenv BEFORE this check, or drop the
  // dotenv call entirely — TODO confirm intended behavior.
  import("dotenv").then((dotenv) => dotenv.config())
  apiKey = process.env.ANTHROPIC_API_KEY
}

export const anthropic = new Anthropic({
apiKey: process.env.ANTHROPIC_API_KEY,
apiKey,
dangerouslyAllowBrowser: true,
})
5 changes: 4 additions & 1 deletion lib/ai/ask-ai-with-previous-attempts.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,12 +10,15 @@ export const askAiWithPreviousAttempts = async ({
systemPrompt,
previousAttempts,
onStream,
anthropicClient,
}: {
prompt: string
systemPrompt: string
previousAttempts?: AttemptHistory[]
onStream?: (chunk: string) => void
anthropicClient?: typeof anthropic
}): Promise<string> => {
const client = anthropicClient || anthropic
const messages: { role: "assistant" | "user"; content: string }[] = [
{ role: "user", content: prompt },
]
Expand Down Expand Up @@ -50,7 +53,7 @@ export const askAiWithPreviousAttempts = async ({
onStream(
`Start streaming AI response, attempt: ${(previousAttempts?.length || 0) + 1}`,
)
const completionStream = await anthropic.messages.create({
const completionStream = await client.messages.create({
model: "claude-3-5-haiku-20241022",
max_tokens: 2048,
system: systemPrompt,
Expand Down
4 changes: 4 additions & 0 deletions lib/ai/run-ai-with-error-correction.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import { safeEvaluateCode } from "lib/code-runner"
import { askAiWithPreviousAttempts } from "./ask-ai-with-previous-attempts"
import { saveAttemptLog } from "lib/utils/save-attempt"
import type Anthropic from "@anthropic-ai/sdk"
const createAttemptFile = ({
fileName,
prompt,
Expand Down Expand Up @@ -45,6 +46,7 @@ export const runAiWithErrorCorrection = async ({
onStream,
onVfsChanged,
vfs,
anthropicClient,
}: {
attempt?: number
logsDir?: string
Expand All @@ -56,6 +58,7 @@ export const runAiWithErrorCorrection = async ({
onStream?: (chunk: string) => void
onVfsChanged?: () => void
vfs?: Record<string, string>
anthropicClient?: Anthropic
}): Promise<{
code: string
codeBlock: string
Expand All @@ -66,6 +69,7 @@ export const runAiWithErrorCorrection = async ({
systemPrompt,
previousAttempts,
onStream,
anthropicClient,
})
const codeMatch = aiResponse.match(/```tsx\s*([\s\S]*?)\s*```/)
const code = codeMatch ? codeMatch[1].trim() : ""
Expand Down
12 changes: 6 additions & 6 deletions tests/tscircuit-circuit-coder/aiCoder.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,14 +4,14 @@ import { expect, test } from "bun:test"
test("AiCoder submitPrompt streams and updates vfs", async () => {
const streamedChunks: string[] = []
let vfsUpdated = false
const onStreamedChunk = (chunk: string) => {
const aiCoder = createAiCoder()
aiCoder.on("streamedChunk", (chunk: string) => {
console.log(chunk)
streamedChunks.push(chunk)
}
const onVfsChanged = () => {
})
aiCoder.on("vfsChanged", () => {
vfsUpdated = true
}

const aiCoder = createAiCoder(onStreamedChunk, onVfsChanged)
})

await aiCoder.submitPrompt(
"create a random complicated circuit that does something cool",
Expand Down
3 changes: 2 additions & 1 deletion tsconfig.json
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,8 @@
"paths": {
"src/*": ["src/*"],
"tests/*": ["tests/*"],
"prompt-templates/*": ["lib/prompt-templates/*"]
"prompt-templates/*": ["lib/prompt-templates/*"],
"lib/*": ["lib/*"]
},

// Best practices
Expand Down