-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
feat: Add LLM service to standardize LLM calls
- Loading branch information
1 parent
d397f04
commit f183f75
Showing
11 changed files
with
461 additions
and
16 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -7,5 +7,8 @@ | |
}, | ||
"devDependencies": { | ||
"vitest": "^2.1.8" | ||
}, | ||
"dependencies": { | ||
"openai": "^4.76.0" | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,61 @@ | ||
# LLM | ||
|
||
## Overview | ||
The LLM service provides a unified interface for interacting with various Large Language Model providers (OpenAI, Anthropic, Google). Currently implements OpenAI with future support planned for other providers. | ||
|
||
## Configuration | ||
### LLMServiceConfig | ||
- `provider`: Optional string, defaults to 'openai' | ||
- Supported values: `'openai'` (future support planned for `'anthropic'` and `'google'`) | ||
- `apiKey`: Required string | ||
- Authentication key for the selected provider | ||
- `defaultModel`: Optional string, defaults to 'gpt-4o' | ||
- Model identifier to use for requests | ||
- `logger`: Optional Console interface, defaults to global console | ||
- Used for error logging and debugging | ||
|
||
### Call Method | ||
**Input Parameters:** | ||
- `messages`: Array of chat completion messages | ||
- `tools`: Optional array of chat completion tools | ||
|
||
**Returns:** | ||
- Promise<LLMResult> containing either: | ||
- `{ content: string }` for standard responses | ||
- `{ content: string | null, toolsCall: array }` for tool-based responses | ||
|
||
## Response Formats | ||
### Standard Response | ||
```typescript | ||
{ | ||
content: string // JSON-parsed content from LLM | ||
} | ||
``` | ||
|
||
### Tool Call Response | ||
```typescript | ||
{ | ||
content: string | null, | ||
toolsCall: Array<{ | ||
id: string, | ||
type: string, | ||
function: { | ||
name: string, | ||
arguments: string | ||
} | ||
}> | ||
} | ||
``` | ||
|
||
## Error Handling | ||
### Error Types | ||
- `ContentParsingError`: JSON parsing failures | ||
- `InvalidProviderError`: Unsupported or invalid providers | ||
- `LLMModelError`: Invalid model specifications | ||
- `ProviderError`: API communication errors | ||
|
||
|
||
## Notes | ||
- Currently only supports OpenAI | ||
- Requires JSON response format | ||
- Todo: Streaming support |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,163 @@ | ||
import OpenAI from 'openai'; | ||
import { LLMResult, LLMServiceConfig, Provider } from '../types'; | ||
import { ContentParsingError, InvalidProviderError, LLMModelError, ProviderError } from '../errors'; | ||
import { ensureError } from '../utils'; | ||
|
||
|
||
const DEFAULT_MODEL = 'gpt-4o' as const; | ||
|
||
export class LLM { | ||
private readonly client: OpenAI; | ||
private readonly model: string; | ||
private readonly logger: Console; | ||
private readonly provider: Provider; | ||
|
||
constructor({ | ||
provider = 'openai', | ||
apiKey, | ||
defaultModel = DEFAULT_MODEL, | ||
logger = console | ||
}: LLMServiceConfig) { | ||
this.provider = this.validateProvider(provider); | ||
this.client = this.initializeClient(apiKey); | ||
this.model = this.validateModel(defaultModel); | ||
this.logger = logger; | ||
} | ||
|
||
async call( | ||
messages: OpenAI.Chat.ChatCompletionMessageParam[], | ||
tools: OpenAI.Chat.ChatCompletionTool[] = [], | ||
): Promise<LLMResult> { | ||
try { | ||
switch (this.provider) { | ||
case 'openai': | ||
return this.callOpenAI(messages, tools, this.model); | ||
case 'anthropic': | ||
throw new InvalidProviderError('Anthropic support not yet implemented'); | ||
case 'google': | ||
throw new InvalidProviderError('Google support not yet implemented'); | ||
default: | ||
throw new InvalidProviderError(`Unsupported provider: ${this.provider}`); | ||
} | ||
} catch (error) { | ||
return this.handleError(error); | ||
} | ||
} | ||
|
||
private async callOpenAI( | ||
messages: OpenAI.Chat.ChatCompletionMessageParam[], | ||
tools: OpenAI.Chat.ChatCompletionTool[], | ||
model: string | ||
): Promise<LLMResult> { | ||
try { | ||
const response = await this.client.chat.completions.create({ | ||
model, | ||
messages, | ||
response_format: { type: "json_object" }, | ||
tools: tools?.length ? tools : undefined, | ||
}); | ||
|
||
const message = response.choices[0].message; | ||
return this.prepareResult(message); | ||
} catch (error) { | ||
throw new ProviderError( | ||
`Failed to call OpenAI API: ${ensureError(error).message}`, | ||
); | ||
} | ||
} | ||
|
||
private prepareResult(message: OpenAI.Chat.ChatCompletionMessage): LLMResult { | ||
if (message.tool_calls) { | ||
return this.prepareToolCallResult(message); | ||
} | ||
return this.prepareContentResult(message.content); | ||
} | ||
|
||
private prepareToolCallResult(message: OpenAI.Chat.ChatCompletionMessage): LLMResult { | ||
return { | ||
toolsCall: message.tool_calls, | ||
content: message.content | ||
}; | ||
} | ||
|
||
private prepareContentResult(content: string | null | undefined): LLMResult { | ||
try { | ||
const trimmedContent = content?.trim() ?? ""; | ||
const parsed = this.parseJsonContent(trimmedContent); | ||
return { content: parsed.content }; | ||
} catch (error) { | ||
throw new ContentParsingError( | ||
`Failed to prepare content result: ${ensureError(error).message}` | ||
); | ||
} | ||
} | ||
|
||
private parseJsonContent(content: string): { content: string } { | ||
try { | ||
return JSON.parse(content); | ||
} catch (error) { | ||
throw new ContentParsingError( | ||
`Failed to parse JSON content: ${ensureError(error).message}` | ||
); | ||
} | ||
} | ||
|
||
private initializeClient(apiKey: string): OpenAI { | ||
try { | ||
switch (this.provider) { | ||
case 'openai': | ||
return new OpenAI({ apiKey }); | ||
case 'anthropic': | ||
throw new Error('Anthropic support is not yet implemented'); | ||
case 'google': | ||
throw new Error('Google support is not yet implemented'); | ||
default: | ||
throw new Error(`Unsupported provider: ${this.provider}`); | ||
} | ||
} catch (error) { | ||
throw new InvalidProviderError( | ||
`Failed to initialize ${this.provider} client: ${ensureError(error).message}` | ||
); | ||
} | ||
} | ||
|
||
private validateProvider(provider: Provider): Provider { | ||
const validProviders: Provider[] = ['openai', 'anthropic', 'google']; | ||
if (!validProviders.includes(provider)) { | ||
throw new InvalidProviderError( | ||
`Invalid provider. Must be one of: ${validProviders.join(', ')}` | ||
); | ||
} | ||
return provider; | ||
} | ||
|
||
private validateModel(model: string): string { | ||
if (!model || typeof model !== 'string' || model.trim().length === 0) { | ||
throw new LLMModelError('Model name must be a non-empty string'); | ||
} | ||
return model.trim(); | ||
} | ||
|
||
private handleError(error: unknown): LLMResult { | ||
const normalizedError = ensureError(error); | ||
let errorMessage: string; | ||
|
||
if (error instanceof ContentParsingError) { | ||
errorMessage = `Content parsing error: ${normalizedError.message}`; | ||
} else if (error instanceof ProviderError) { | ||
errorMessage = `Provider error: ${normalizedError.message}`; | ||
} else if (error instanceof LLMModelError) { | ||
errorMessage = `LLM model error: ${normalizedError.message}`; | ||
} else { | ||
errorMessage = `Unexpected error: ${normalizedError.message}`; | ||
} | ||
|
||
this.logger.error({ | ||
message: errorMessage, | ||
timestamp: new Date().toISOString(), | ||
stack: normalizedError.stack, | ||
}); | ||
|
||
return { content: "An error occurred while processing your request. ${errorMessage}" }; | ||
} | ||
} |
File renamed without changes.
File renamed without changes.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,21 @@ | ||
import OpenAI from "openai"; | ||
|
||
/**
 * Describes a single input field of a tool/function parameter schema.
 * NOTE(review): not referenced by the LLM service in this commit —
 * presumably consumed by tool-definition builders; verify against callers.
 */
export type FunctionInput = {
  type: string;        // JSON-schema style type name, e.g. "string"
  description: string; // human-readable description surfaced to the model
  required?: boolean;  // treated as optional when omitted — TODO confirm
};
|
||
/** Known LLM backends. Only 'openai' is implemented; the others currently throw. */
export type Provider = 'openai' | 'anthropic' | 'google';
|
||
/**
 * Normalized result of an LLM call.
 * Plain responses carry `content` only; tool-based responses additionally
 * carry `toolsCall` (and `content` may then be null).
 */
export interface LLMResult {
  content?: string | null;
  toolsCall?: OpenAI.Chat.ChatCompletionMessage['tool_calls'];
}
|
||
/** Constructor options for the LLM service. */
export interface LLMServiceConfig {
  provider?: Provider;   // defaults to 'openai'
  apiKey: string;        // API key for the selected provider (required)
  defaultModel?: string; // defaults to 'gpt-4o'
  logger?: Console;      // defaults to the global console
}
This file was deleted.
Oops, something went wrong.
Oops, something went wrong.