From 46cb48023e6b2ffa52a44775b58a83a97dcffac2 Mon Sep 17 00:00:00 2001
From: lloydzhou
Date: Fri, 2 Aug 2024 18:50:48 +0800
Subject: [PATCH] fix typescript error

---
 app/client/api.ts              | 3 ++-
 app/client/platforms/openai.ts | 7 +++++--
 app/components/chat.tsx        | 5 +++--
 app/store/config.ts            | 2 ++
 app/typing.ts                  | 2 ++
 5 files changed, 14 insertions(+), 5 deletions(-)

diff --git a/app/client/api.ts b/app/client/api.ts
index f10e4761887..88157e79cc7 100644
--- a/app/client/api.ts
+++ b/app/client/api.ts
@@ -6,7 +6,7 @@ import {
   ServiceProvider,
 } from "../constant";
 import { ChatMessage, ModelType, useAccessStore, useChatStore } from "../store";
-import { ChatGPTApi } from "./platforms/openai";
+import { ChatGPTApi, DalleRequestPayload } from "./platforms/openai";
 import { GeminiProApi } from "./platforms/google";
 import { ClaudeApi } from "./platforms/anthropic";
 import { ErnieApi } from "./platforms/baidu";
@@ -42,6 +42,7 @@ export interface LLMConfig {
   stream?: boolean;
   presence_penalty?: number;
   frequency_penalty?: number;
+  size?: DalleRequestPayload["size"];
 }
 
 export interface ChatOptions {
diff --git a/app/client/platforms/openai.ts b/app/client/platforms/openai.ts
index 28de30051ea..54309e29f7e 100644
--- a/app/client/platforms/openai.ts
+++ b/app/client/platforms/openai.ts
@@ -13,6 +13,7 @@ import { useAccessStore, useAppConfig, useChatStore } from "@/app/store";
 import { collectModelsWithDefaultModel } from "@/app/utils/model";
 import { preProcessImageContent } from "@/app/utils/chat";
 import { cloudflareAIGatewayUrl } from "@/app/utils/cloudflare";
+import { DalleSize } from "@/app/typing";
 
 import {
   ChatOptions,
@@ -63,7 +64,7 @@ export interface DalleRequestPayload {
   model: string;
   prompt: string;
   n: number;
-  size: "1024x1024" | "1792x1024" | "1024x1792";
+  size: DalleSize;
 }
 
 export class ChatGPTApi implements LLMApi {
@@ -141,7 +142,9 @@ export class ChatGPTApi implements LLMApi {
 
     const isDalle3 = _isDalle3(options.config.model);
     if (isDalle3) {
-      const prompt = getMessageTextContent(options.messages.slice(-1)?.pop());
+      const prompt = getMessageTextContent(
+        options.messages.slice(-1)?.pop() as any,
+      );
       requestPayload = {
         model: options.config.model,
         prompt,
diff --git a/app/components/chat.tsx b/app/components/chat.tsx
index b95e85d45df..67ea80c4a85 100644
--- a/app/components/chat.tsx
+++ b/app/components/chat.tsx
@@ -69,6 +69,7 @@ import { uploadImage as uploadImageRemote } from "@/app/utils/chat";
 import dynamic from "next/dynamic";
 
 import { ChatControllerPool } from "../client/controller";
+import { DalleSize } from "../typing";
 import { Prompt, usePromptStore } from "../store/prompt";
 import Locale from "../locales";
 
@@ -484,9 +485,9 @@ export function ChatActions(props: {
   const [showUploadImage, setShowUploadImage] = useState(false);
 
   const [showSizeSelector, setShowSizeSelector] = useState(false);
-  const dalle3Sizes = ["1024x1024", "1792x1024", "1024x1792"];
+  const dalle3Sizes: DalleSize[] = ["1024x1024", "1792x1024", "1024x1792"];
   const currentSize =
-    chatStore.currentSession().mask.modelConfig?.size || "1024x1024";
+    chatStore.currentSession().mask.modelConfig?.size ?? "1024x1024";
 
   useEffect(() => {
     const show = isVisionModel(currentModel);
diff --git a/app/store/config.ts b/app/store/config.ts
index 1eaafe12b1d..705a9d87c40 100644
--- a/app/store/config.ts
+++ b/app/store/config.ts
@@ -1,4 +1,5 @@
 import { LLMModel } from "../client/api";
+import { DalleSize } from "../typing";
 import { getClientConfig } from "../config/client";
 import {
   DEFAULT_INPUT_TEMPLATE,
@@ -60,6 +61,7 @@ export const DEFAULT_CONFIG = {
     compressMessageLengthThreshold: 1000,
     enableInjectSystemPrompts: true,
     template: config?.template ?? DEFAULT_INPUT_TEMPLATE,
+    size: "1024x1024" as DalleSize,
   },
 };
 
diff --git a/app/typing.ts b/app/typing.ts
index b09722ab902..86320358157 100644
--- a/app/typing.ts
+++ b/app/typing.ts
@@ -7,3 +7,5 @@ export interface RequestMessage {
   role: MessageRole;
   content: string;
 }
+
+export type DalleSize = "1024x1024" | "1792x1024" | "1024x1792";