
Commit

fix typescript error
lloydzhou committed Aug 2, 2024
1 parent 1c24ca5 · commit 46cb480
Showing 5 changed files with 14 additions and 5 deletions.
app/client/api.ts: 3 changes (2 additions & 1 deletion)
@@ -6,7 +6,7 @@ import {
   ServiceProvider,
 } from "../constant";
 import { ChatMessage, ModelType, useAccessStore, useChatStore } from "../store";
-import { ChatGPTApi } from "./platforms/openai";
+import { ChatGPTApi, DalleRequestPayload } from "./platforms/openai";
 import { GeminiProApi } from "./platforms/google";
 import { ClaudeApi } from "./platforms/anthropic";
 import { ErnieApi } from "./platforms/baidu";
@@ -42,6 +42,7 @@ export interface LLMConfig {
   stream?: boolean;
   presence_penalty?: number;
   frequency_penalty?: number;
+  size?: DalleRequestPayload["size"];
 }
 
 export interface ChatOptions {
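A note on the api.ts change: declaring the new field as size?: DalleRequestPayload["size"] uses an indexed access type, so LLMConfig never restates the list of allowed sizes and cannot drift from whatever the request payload accepts. A minimal standalone sketch of the pattern (the interfaces are trimmed for illustration, not the repository's full definitions):

// Indexed access type: LLMConfig["size"] is whatever DalleRequestPayload declares.
interface DalleRequestPayload {
  size: "1024x1024" | "1792x1024" | "1024x1792";
}

interface LLMConfig {
  model: string;
  size?: DalleRequestPayload["size"];
}

const ok: LLMConfig = { model: "dall-e-3", size: "1792x1024" }; // compiles
// const bad: LLMConfig = { model: "dall-e-3", size: "512x512" }; // rejected: not in the union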
app/client/platforms/openai.ts: 7 changes (5 additions & 2 deletions)
@@ -13,6 +13,7 @@ import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
 import { collectModelsWithDefaultModel } from "@/app/utils/model";
 import { preProcessImageContent } from "@/app/utils/chat";
 import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";
+import { DalleSize } from "@/app/typing";
 
 import {
   ChatOptions,
@@ -63,7 +64,7 @@ export interface DalleRequestPayload {
   model: string;
   prompt: string;
   n: number;
-  size: "1024x1024" | "1792x1024" | "1024x1792";
+  size: DalleSize;
 }
 
 export class ChatGPTApi implements LLMApi {
@@ -141,7 +142,9 @@ export class ChatGPTApi implements LLMApi {
 
     const isDalle3 = _isDalle3(options.config.model);
     if (isDalle3) {
-      const prompt = getMessageTextContent(options.messages.slice(-1)?.pop());
+      const prompt = getMessageTextContent(
+        options.messages.slice(-1)?.pop() as any,
+      );
       requestPayload = {
         model: options.config.model,
         prompt,
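The prompt extraction is where the original compiler error came from: Array.prototype.pop() returns T | undefined, so options.messages.slice(-1)?.pop() is possibly undefined while getMessageTextContent expects a concrete message, and the as any cast is what silences that mismatch. A stricter alternative would narrow the value instead of casting; the sketch below only illustrates the idea, with MessageLike and getText standing in for the repository's actual types and helper:

// Sketch: narrow the possibly-undefined last message instead of casting to any.
interface MessageLike {
  role: string;
  content: string;
}

function getText(message: MessageLike): string {
  return message.content; // stand-in for getMessageTextContent
}

function lastPrompt(messages: MessageLike[]): string {
  const last = messages.at(-1); // MessageLike | undefined
  if (!last) {
    throw new Error("no message available to build a DALL-E prompt");
  }
  return getText(last); // narrowed here, no cast needed
}

Either route type-checks; the cast keeps the diff minimal, while the guard keeps strict null checking intact.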
app/components/chat.tsx: 5 changes (3 additions & 2 deletions)
@@ -69,6 +69,7 @@ import { uploadImage as uploadImageRemote } from "@/app/utils/chat";
 import dynamic from "next/dynamic";
 
 import { ChatControllerPool } from "../client/controller";
+import { DalleSize } from "../typing";
 import { Prompt, usePromptStore } from "../store/prompt";
 import Locale from "../locales";
 
@@ -484,9 +485,9 @@ export function ChatActions(props: {
   const [showUploadImage, setShowUploadImage] = useState(false);
 
   const [showSizeSelector, setShowSizeSelector] = useState(false);
-  const dalle3Sizes = ["1024x1024", "1792x1024", "1024x1792"];
+  const dalle3Sizes: DalleSize[] = ["1024x1024", "1792x1024", "1024x1792"];
   const currentSize =
-    chatStore.currentSession().mask.modelConfig?.size || "1024x1024";
+    chatStore.currentSession().mask.modelConfig?.size ?? "1024x1024";
 
   useEffect(() => {
     const show = isVisionModel(currentModel);
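Two details in the chat.tsx hunk are easy to miss: annotating the array as DalleSize[] makes each literal check against the union instead of widening to string[], and the switch from || to ?? changes the fallback rule, because nullish coalescing only substitutes the default for null or undefined, not for every falsy value. A small illustration of the operator difference (the values are invented for the example):

// "" is falsy but not nullish, so || and ?? disagree on it.
const configured: string | undefined = undefined;
const empty: string = "";

const a = configured ?? "1024x1024"; // "1024x1024" (undefined is nullish)
const b = empty ?? "1024x1024"; // "" (?? keeps the empty string)
const c = empty || "1024x1024"; // "1024x1024" (|| treats "" as missing)
console.log(a, b, c);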
app/store/config.ts: 2 changes (2 additions & 0 deletions)
@@ -1,4 +1,5 @@
 import { LLMModel } from "../client/api";
+import { DalleSize } from "../typing";
 import { getClientConfig } from "../config/client";
 import {
   DEFAULT_INPUT_TEMPLATE,
@@ -60,6 +61,7 @@ export const DEFAULT_CONFIG = {
     compressMessageLengthThreshold: 1000,
     enableInjectSystemPrompts: true,
     template: config?.template ?? DEFAULT_INPUT_TEMPLATE,
+    size: "1024x1024" as DalleSize,
   },
 };
 
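The as DalleSize assertion in DEFAULT_CONFIG is doing real work: a bare string literal inside an object initializer widens to string, which would no longer satisfy a field typed as the narrow union. A short sketch of the behavior, with DalleSize copied from app/typing.ts and the config shape abbreviated:

type DalleSize = "1024x1024" | "1792x1024" | "1024x1792";

interface ModelConfigLike {
  size: DalleSize;
}

const widened = { size: "1024x1024" }; // size is inferred as string
const asserted = { size: "1024x1024" as DalleSize }; // size stays the union

// const bad: ModelConfigLike = widened; // error: string is not assignable to DalleSize
const good: ModelConfigLike = asserted; // compiles

Writing "1024x1024" as const would also keep the literal type; the as DalleSize form simply states the intended union directly.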
app/typing.ts: 2 changes (2 additions & 0 deletions)
@@ -7,3 +7,5 @@ export interface RequestMessage {
   role: MessageRole;
   content: string;
 }
+
+export type DalleSize = "1024x1024" | "1792x1024" | "1024x1792";
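Exporting the union from app/typing.ts gives the payload in openai.ts, the size selector in chat.tsx, and the default in config.ts a single definition to import instead of three copies of the literal list. A tiny usage sketch, assuming the importing file lives under app/ the way the components do:

import { DalleSize } from "./typing"; // path is illustrative; chat.tsx imports from "../typing"

const portrait: DalleSize = "1024x1792"; // accepted
// const unsupported: DalleSize = "2048x2048"; // rejected by the compiler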
