diff --git a/service/.env.example b/service/.env.example index d6f2ba5929..1eb1dbf7b1 100644 --- a/service/.env.example +++ b/service/.env.example @@ -1,6 +1,9 @@ # OpenAI API Key - https://platform.openai.com/overview OPENAI_API_KEY= +# OpenAI API Key Backup +OPENAI_API_KEY_BACKUP= + # change this to an `accessToken` extracted from the ChatGPT site's `https://chat.openai.com/api/auth/session` response OPENAI_ACCESS_TOKEN= diff --git a/service/src/chatgpt/index.ts b/service/src/chatgpt/index.ts index 9f3b31068a..080f206ef7 100644 --- a/service/src/chatgpt/index.ts +++ b/service/src/chatgpt/index.ts @@ -1,3 +1,5 @@ +// noinspection JSIgnoredPromiseFromCall + import * as dotenv from 'dotenv' import 'isomorphic-fetch' import type { ChatGPTAPIOptions, ChatMessage, SendMessageOptions } from 'chatgpt' @@ -28,22 +30,23 @@ const timeoutMs: number = !isNaN(+process.env.TIMEOUT_MS) ? +process.env.TIMEOUT const disableDebug: boolean = process.env.OPENAI_API_DISABLE_DEBUG === 'true' let apiModel: ApiModel +let apiKey = process.env.OPENAI_API_KEY -if (!isNotEmptyString(process.env.OPENAI_API_KEY) && !isNotEmptyString(process.env.OPENAI_ACCESS_TOKEN)) +if (!isNotEmptyString(apiKey) && !isNotEmptyString(process.env.OPENAI_ACCESS_TOKEN)) throw new Error('Missing OPENAI_API_KEY or OPENAI_ACCESS_TOKEN environment variable') let api: ChatGPTAPI | ChatGPTUnofficialProxyAPI -(async () => { +async function initApi() { // More Info: https://github.com/transitive-bullshit/chatgpt-api - if (isNotEmptyString(process.env.OPENAI_API_KEY)) { + if (isNotEmptyString(apiKey)) { const OPENAI_API_BASE_URL = process.env.OPENAI_API_BASE_URL const OPENAI_API_MODEL = process.env.OPENAI_API_MODEL const model = isNotEmptyString(OPENAI_API_MODEL) ? 
OPENAI_API_MODEL : 'gpt-3.5-turbo' const options: ChatGPTAPIOptions = { - apiKey: process.env.OPENAI_API_KEY, + apiKey, completionParams: { model }, debug: !disableDebug, } @@ -86,10 +89,12 @@ let api: ChatGPTAPI | ChatGPTUnofficialProxyAPI api = new ChatGPTUnofficialProxyAPI({ ...options }) apiModel = 'ChatGPTUnofficialProxyAPI' } -})() +} + +initApi() async function chatReplyProcess(options: RequestOptions) { - const { lastContext, process, systemMessage } = options + const { lastContext, onProgressProcess, systemMessage } = options let message = options.message try { if (message.startsWith('/') && message.slice(message.length - 5) === '11514') { @@ -119,7 +124,7 @@ async function chatReplyProcess(options: RequestOptions) { const response = await api.sendMessage(message, { ...options, onProgress: (partialResponse) => { - process?.(partialResponse) + onProgressProcess?.(partialResponse) }, }) @@ -127,6 +132,12 @@ async function chatReplyProcess(options: RequestOptions) { } catch (error: any) { const code = error.statusCode + if (apiModel === 'ChatGPTAPI' && code === 401 && isNotEmptyString(process.env.OPENAI_API_KEY_BACKUP) && apiKey !== process.env.OPENAI_API_KEY_BACKUP) { + apiKey = process.env.OPENAI_API_KEY_BACKUP + await initApi() + return chatReplyProcess({ ...options, lastContext: undefined }) + } + global.console.log(error) if (Reflect.has(ErrorCodeMessage, code)) return sendResponse({ type: 'Fail', message: ErrorCodeMessage[code] }) @@ -135,7 +146,7 @@ async function chatReplyProcess(options: RequestOptions) { } async function fetchBalance() { - const OPENAI_API_KEY = process.env.OPENAI_API_KEY + const OPENAI_API_KEY = apiKey const OPENAI_API_BASE_URL = process.env.OPENAI_API_BASE_URL if (!isNotEmptyString(OPENAI_API_KEY)) diff --git a/service/src/chatgpt/types.ts b/service/src/chatgpt/types.ts index 1e65f4ca39..1e4b05d295 100644 --- a/service/src/chatgpt/types.ts +++ b/service/src/chatgpt/types.ts @@ -3,6 +3,6 @@ import type { ChatMessage } from 'chatgpt' export interface RequestOptions { message: string 
lastContext?: { conversationId?: string; parentMessageId?: string } - process?: (chat: ChatMessage) => void + onProgressProcess?: (chat: ChatMessage) => void systemMessage?: string } diff --git a/service/src/index.ts b/service/src/index.ts index 28041d1c70..9a4c50b750 100644 --- a/service/src/index.ts +++ b/service/src/index.ts @@ -28,7 +28,7 @@ router.post('/chat-process', [auth, limiter], async (req, res) => { await chatReplyProcess({ message: prompt, lastContext: options, - process: (chat: ChatMessage) => { + onProgressProcess: (chat: ChatMessage) => { res.write(firstChunk ? JSON.stringify(chat) : `\n${JSON.stringify(chat)}`) firstChunk = false },