Skip to content

Commit

Permalink
feat: add claude and bard
Browse files Browse the repository at this point in the history
  • Loading branch information
Yidadaa committed Nov 7, 2023
1 parent 5610f42 commit cdf0311
Show file tree
Hide file tree
Showing 20 changed files with 580 additions and 394 deletions.
29 changes: 29 additions & 0 deletions app/client/anthropic/config.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
// Default configuration for the Anthropic (Claude) provider:
// sampling parameters, provider endpoint/auth placeholders, and the
// built-in model list merged with user-defined custom models elsewhere.
export const AnthropicConfig = {
  model: {
    // Model used for regular chat completions.
    model: "claude-instant-1",
    // Model used for conversation summarization requests.
    summarizeModel: "claude-instant-1",

    // Sampling parameters sent with every completion request.
    max_tokens_to_sample: 8192,
    temperature: 0.5,
    top_p: 0.7,
    top_k: 5,
  },
  provider: {
    name: "Anthropic" as const,
    endpoint: "https://api.anthropic.com",
    // Empty by default; filled in from user settings / access store.
    apiKey: "",
    // Comma-separated list of extra model names added by the user.
    customModels: "",
    // Value for the required `anthropic-version` request header.
    version: "2023-06-01",

    // Models offered by default in the model picker.
    models: [
      {
        name: "claude-instant-1",
        available: true,
      },
      {
        name: "claude-2",
        available: true,
      },
    ],
  },
};
233 changes: 233 additions & 0 deletions app/client/anthropic/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,233 @@
import { ModelConfig, ProviderConfig } from "@/app/store";
import { createLogger } from "@/app/utils/log";
import { getAuthKey } from "../common/auth";
import { API_PREFIX, AnthropicPath, ApiPath } from "@/app/constant";
import { getApiPath } from "@/app/utils/path";
import { trimEnd } from "@/app/utils/string";
import { Anthropic } from "./types";
import { ChatOptions, LLMModel, LLMUsage, RequestMessage } from "../types";
import { omit } from "@/app/utils/object";
import {
EventStreamContentType,
fetchEventSource,
} from "@fortaine/fetch-event-source";
import { prettyObject } from "@/app/utils/format";
import Locale from "@/app/locales";
import { AnthropicConfig } from "./config";

/**
 * Create an LLM client for the Anthropic completion API.
 *
 * The returned object exposes the common client surface used by the app:
 * `chat` / `chatStream` for completions, `usage` (not supported by this
 * provider — always zeros), and `models` (built-in models plus any
 * user-configured custom model names).
 *
 * @param providerConfigs provider settings; only `.anthropic` is read.
 * @param modelConfig model settings; only `.anthropic` is read.
 */
export function createAnthropicClient(
  providerConfigs: ProviderConfig,
  modelConfig: ModelConfig,
) {
  // Shallow copies so this client does not share state with the stores.
  const anthropicConfig = { ...providerConfigs.anthropic };
  const logger = createLogger("[Anthropic]");
  const anthropicModelConfig = { ...modelConfig.anthropic };

  return {
    /** Headers required by the Anthropic API (auth + API version). */
    headers() {
      return {
        "Content-Type": "application/json",
        "x-api-key": getAuthKey(anthropicConfig.apiKey),
        "anthropic-version": anthropicConfig.version,
      };
    },

    /** Build the full request URL for a given API path. */
    path(path: AnthropicPath): string {
      let baseUrl: string = anthropicConfig.endpoint;

      // if endpoint is empty, use default endpoint
      if (baseUrl.trim().length === 0) {
        baseUrl = getApiPath(ApiPath.Anthropic);
      }

      if (!baseUrl.startsWith("http") && !baseUrl.startsWith(API_PREFIX)) {
        baseUrl = "https://" + baseUrl;
      }

      baseUrl = trimEnd(baseUrl, "/");

      return `${baseUrl}/${path}`;
    },

    /** Extract the assistant text from a non-streaming response. */
    extractMessage(res: Anthropic.ChatResponse) {
      return res.completion;
    },

    /**
     * Build the request path, fetch payload, and abort controller shared
     * by `chat` and `chatStream`.
     */
    beforeRequest(options: ChatOptions, stream = false) {
      // Anthropic's completion API expects a single prompt string with
      // "\n\nHuman:" / "\n\nAssistant:" turn markers; system messages are
      // folded into Human turns since the API has no system role here.
      const ClaudeMapper: Record<RequestMessage["role"], string> = {
        assistant: "Assistant",
        user: "Human",
        system: "Human",
      };

      const prompt = options.messages
        .map((v) => ({
          role: ClaudeMapper[v.role] ?? "Human",
          content: v.content,
        }))
        .map((v) => `\n\n${v.role}: ${v.content}`)
        .join("");

      // FIX: select the model per-request instead of mutating
      // `anthropicModelConfig.model`. The previous in-place assignment
      // persisted for the lifetime of the client, so after one summarize
      // request all later regular chats silently used the summarize model.
      const model = options.shouldSummarize
        ? anthropicModelConfig.summarizeModel
        : anthropicModelConfig.model;

      const requestBody: Anthropic.ChatRequest = {
        prompt,
        stream,
        ...omit(anthropicModelConfig, "summarizeModel"),
        model,
      };

      const path = this.path(AnthropicPath.Chat);

      logger.log("path = ", path, requestBody);

      const controller = new AbortController();
      options.onController?.(controller);

      const payload = {
        method: "POST",
        body: JSON.stringify(requestBody),
        signal: controller.signal,
        headers: this.headers(),
        // FIX: must be "cors", not "no-cors". In no-cors mode the browser
        // strips non-safelisted headers (x-api-key, anthropic-version,
        // application/json Content-Type) and returns an opaque response,
        // so auth fails and the body can never be read.
        mode: "cors" as RequestMode,
      };

      return {
        path,
        payload,
        controller,
      };
    },

    /** Non-streaming chat: resolve the full completion in one request. */
    async chat(options: ChatOptions) {
      try {
        const { path, payload, controller } = this.beforeRequest(
          options,
          false,
        );

        // Aborting finishes with an empty message rather than erroring.
        controller.signal.onabort = () => options.onFinish("");

        const res = await fetch(path, payload);
        const resJson = await res.json();

        const message = this.extractMessage(resJson);
        options.onFinish(message);
      } catch (e) {
        logger.error("failed to chat", e);
        options.onError?.(e as Error);
      }
    },

    /** Streaming chat via server-sent events; emits incremental deltas. */
    async chatStream(options: ChatOptions) {
      try {
        const { path, payload, controller } = this.beforeRequest(options, true);

        const context = {
          text: "",
          finished: false,
        };

        // Idempotent finalizer: onFinish must fire exactly once.
        const finish = () => {
          if (!context.finished) {
            options.onFinish(context.text);
            context.finished = true;
          }
        };

        controller.signal.onabort = finish;

        logger.log(payload);

        fetchEventSource(path, {
          ...payload,
          async onopen(res) {
            const contentType = res.headers.get("content-type");
            logger.log("response content type: ", contentType);

            // A plain-text response is a complete (non-stream) answer.
            if (contentType?.startsWith("text/plain")) {
              context.text = await res.clone().text();
              return finish();
            }

            // Anything that is not a 200 event-stream is an error; surface
            // whatever diagnostic text/JSON the server sent.
            if (
              !res.ok ||
              !res.headers
                .get("content-type")
                ?.startsWith(EventStreamContentType) ||
              res.status !== 200
            ) {
              const responseTexts = [context.text];
              let extraInfo = await res.clone().text();
              try {
                const resJson = await res.clone().json();
                extraInfo = prettyObject(resJson);
              } catch {}

              if (res.status === 401) {
                responseTexts.push(Locale.Error.Unauthorized);
              }

              if (extraInfo) {
                responseTexts.push(extraInfo);
              }

              context.text = responseTexts.join("\n\n");

              return finish();
            }
          },
          onmessage(msg) {
            if (msg.data === "[DONE]" || context.finished) {
              return finish();
            }
            const chunk = msg.data;
            try {
              const chunkJson = JSON.parse(
                chunk,
              ) as Anthropic.ChatStreamResponse;
              const delta = chunkJson.completion;
              if (delta) {
                context.text += delta;
                options.onUpdate?.(context.text, delta);
              }
            } catch (e) {
              logger.error("[Request] parse error", chunk, msg);
            }
          },
          onclose() {
            finish();
          },
          onerror(e) {
            options.onError?.(e);
          },
          openWhenHidden: true,
        });
      } catch (e) {
        logger.error("failed to chat", e);
        options.onError?.(e as Error);
      }
    },

    /** Usage reporting is not available for this provider. */
    async usage() {
      return {
        used: 0,
        total: 0,
      } as LLMUsage;
    },

    /** Built-in models plus user-configured custom models. */
    async models(): Promise<LLMModel[]> {
      const customModels = anthropicConfig.customModels
        .split(",")
        .map((v) => v.trim())
        .filter((v) => !!v)
        .map((v) => ({
          name: v,
          available: true,
        }));

      return [...AnthropicConfig.provider.models.slice(), ...customModels];
    },
  };
}
24 changes: 24 additions & 0 deletions app/client/anthropic/types.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
/** Request/response shapes for the Anthropic text-completion API. */
export namespace Anthropic {
  /** Body of a POST to the completion endpoint. */
  export interface ChatRequest {
    model: string; // The model that will complete your prompt.
    prompt: string; // The prompt that you want Claude to complete.
    max_tokens_to_sample: number; // The maximum number of tokens to generate before stopping.
    stop_sequences?: string[]; // Sequences that will cause the model to stop generating completion text.
    temperature?: number; // Amount of randomness injected into the response.
    top_p?: number; // Use nucleus sampling.
    top_k?: number; // Only sample from the top K options for each subsequent token.
    metadata?: object; // An object describing metadata about the request.
    stream?: boolean; // Whether to incrementally stream the response using server-sent events.
  }

  /** Non-streaming completion response. */
  export interface ChatResponse {
    completion: string; // The generated text.
    stop_reason: "stop_sequence" | "max_tokens"; // Why generation halted.
    model: string; // The model that produced the completion.
  }

  /** One server-sent-event chunk of a streaming completion. */
  export type ChatStreamResponse = ChatResponse & {
    stop?: string; // The stop sequence that was hit, if any.
    log_id: string; // Server-side identifier for this request.
  };
}
11 changes: 5 additions & 6 deletions app/client/common/auth.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,23 +6,22 @@ export function bearer(value: string) {
return `Bearer ${value.trim()}`;
}

export function getAuthHeaders(apiKey = "") {
export function getAuthKey(apiKey = "") {
const accessStore = useAccessStore.getState();
const isApp = !!getClientConfig()?.isApp;

let headers: Record<string, string> = {};
let authKey = "";

if (apiKey) {
// use user's api key first
headers.Authorization = bearer(apiKey);
authKey = bearer(apiKey);
} else if (
accessStore.enabledAccessControl() &&
!isApp &&
!!accessStore.accessCode
) {
// or use access code
headers.Authorization = bearer(ACCESS_CODE_PREFIX + accessStore.accessCode);
authKey = bearer(ACCESS_CODE_PREFIX + accessStore.accessCode);
}

return headers;
return authKey;
}
5 changes: 0 additions & 5 deletions app/client/common/config.ts

This file was deleted.

2 changes: 2 additions & 0 deletions app/client/core.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,11 @@ import { MaskConfig, ProviderConfig } from "../store";
import { shareToShareGPT } from "./common/share";
import { createOpenAiClient } from "./openai";
import { ChatControllerPool } from "./common/controller";
import { createAnthropicClient } from "./anthropic";

// Registry mapping a provider id to its client factory; used to
// construct the concrete LLM client for the selected provider.
export const LLMClients = {
  openai: createOpenAiClient,
  anthropic: createAnthropicClient,
};

export function createLLMClient(
Expand Down
Loading

0 comments on commit cdf0311

Please sign in to comment.