feat: support long replies #450

Merged · 7 commits · Mar 10, 2023
3 changes: 3 additions & 0 deletions .env
@@ -2,3 +2,6 @@
VITE_GLOB_API_URL=/api

VITE_APP_API_BASE_URL=http://localhost:3002/

# Whether long replies are supported, which may result in higher API fees
VITE_GLOB_OPEN_LONG_REPLY=false
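
The new flag is read on the client through Vite's `import.meta.env`, which exposes env values as strings, so it has to be compared against the literal `'true'` (the actual check appears below in src/views/chat/index.vue). A minimal sketch of that pattern:

```ts
// Sketch only: Vite env values are strings, so 'false' would otherwise be truthy.
const openLongReply = import.meta.env.VITE_GLOB_OPEN_LONG_REPLY === 'true'

if (openLongReply) {
  // Continuation requests may be re-issued when a reply is cut off,
  // which consumes extra tokens and can raise API cost.
}
```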
2 changes: 1 addition & 1 deletion service/package.json
@@ -24,7 +24,7 @@
"common:cleanup": "rimraf node_modules && rimraf pnpm-lock.yaml"
},
"dependencies": {
"chatgpt": "^5.0.8",
"chatgpt": "^5.0.9",
"dotenv": "^16.0.3",
"esno": "^0.16.3",
"express": "^4.18.2",
8 changes: 4 additions & 4 deletions service/pnpm-lock.yaml

Some generated files are not rendered by default.

4 changes: 2 additions & 2 deletions service/src/chatgpt/index.ts
@@ -87,8 +87,8 @@ async function chatReplyProcess(
lastContext?: { conversationId?: string; parentMessageId?: string },
process?: (chat: ChatMessage) => void,
) {
if (!message)
return sendResponse({ type: 'Fail', message: 'Message is empty' })
// if (!message)
// return sendResponse({ type: 'Fail', message: 'Message is empty' })

try {
let options: SendMessageOptions = { timeoutMs }
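
The empty-message guard in chatReplyProcess is commented out because a long-reply continuation deliberately sends an empty prompt together with the parentMessageId of the truncated answer, so the model picks up where it stopped. A minimal sketch of that call on the service side, assuming the `chatgpt` package's `ChatGPTAPI` and its `sendMessage(text, options)` signature (the option names mirror the SendMessageOptions already used above):

```ts
import { ChatGPTAPI } from 'chatgpt'

// Sketch only: shows why an empty message must now pass through the guard.
const api = new ChatGPTAPI({ apiKey: process.env.OPENAI_API_KEY ?? '' })

async function continueReply(parentMessageId: string) {
  // An empty prompt plus the parent message id asks the model to continue
  // its previous, length-truncated answer instead of starting a new one.
  return api.sendMessage('', { parentMessageId, timeoutMs: 60 * 1000 })
}
```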
162 changes: 94 additions & 68 deletions src/views/chat/index.vue
@@ -15,6 +15,8 @@ import { t } from '@/locales'

let controller = new AbortController()

const openLongReply = import.meta.env.VITE_GLOB_OPEN_LONG_REPLY === 'true'

const route = useRoute()
const dialog = useDialog()
const ms = useMessage()
@@ -40,7 +42,7 @@ function handleSubmit() {
}

async function onConversation() {
const message = prompt.value
let message = prompt.value

if (loading.value)
return
Expand Down Expand Up @@ -87,40 +89,53 @@ async function onConversation() {
scrollToBottom()

try {
await fetchChatAPIProcess<Chat.ConversationResponse>({
prompt: message,
options,
signal: controller.signal,
onDownloadProgress: ({ event }) => {
const xhr = event.target
const { responseText } = xhr
// Always process the final line
const lastIndex = responseText.lastIndexOf('\n')
let chunk = responseText
if (lastIndex !== -1)
chunk = responseText.substring(lastIndex)
try {
const data = JSON.parse(chunk)
updateChat(
+uuid,
dataSources.value.length - 1,
{
dateTime: new Date().toLocaleString(),
text: data.text ?? '',
inversion: false,
error: false,
loading: false,
conversationOptions: { conversationId: data.conversationId, parentMessageId: data.id },
requestOptions: { prompt: message, options: { ...options } },
},
)
scrollToBottom()
}
catch (error) {
let lastText = ''
const fetchChatAPIOnce = async () => {
await fetchChatAPIProcess<Chat.ConversationResponse>({
prompt: message,
options,
signal: controller.signal,
onDownloadProgress: ({ event }) => {
const xhr = event.target
const { responseText } = xhr
// Always process the final line
const lastIndex = responseText.lastIndexOf('\n')
let chunk = responseText
if (lastIndex !== -1)
chunk = responseText.substring(lastIndex)
try {
const data = JSON.parse(chunk)
updateChat(
+uuid,
dataSources.value.length - 1,
{
dateTime: new Date().toLocaleString(),
text: lastText + data.text ?? '',
inversion: false,
error: false,
loading: false,
conversationOptions: { conversationId: data.conversationId, parentMessageId: data.id },
requestOptions: { prompt: message, options: { ...options } },
},
)

if (openLongReply && data.detail.choices[0].finish_reason === 'length') {
options.parentMessageId = data.id
lastText = data.text
message = ''
return fetchChatAPIOnce()
}

scrollToBottom()
}
catch (error) {
//
}
},
})
}
},
})
}

await fetchChatAPIOnce()
}
catch (error: any) {
const errorMessage = error?.message ?? t('common.wrong')
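
The continuation check above relies on the streamed chunk carrying the raw completion result under data.detail. A sketch of the response shape the onDownloadProgress handler assumes; only the fields referenced by the code are listed, the full ChatMessage type lives in the chatgpt package:

```ts
// Sketch only: the shape the handler above relies on.
interface StreamedChunk {
  id: string // becomes the next parentMessageId when continuing
  text: string // text generated for this message
  conversationId?: string
  detail?: {
    choices: Array<{
      // 'length' means the reply was cut off by the token limit,
      // which is what triggers another fetchChatAPIOnce call
      finish_reason?: string
    }>
  }
}
```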
@@ -180,7 +195,7 @@ async function onRegenerate(index: number) {

const { requestOptions } = dataSources.value[index]

const message = requestOptions?.prompt ?? ''
let message = requestOptions?.prompt ?? ''

let options: Chat.ConversationRequest = {}

@@ -204,39 +219,50 @@
)

try {
await fetchChatAPIProcess<Chat.ConversationResponse>({
prompt: message,
options,
signal: controller.signal,
onDownloadProgress: ({ event }) => {
const xhr = event.target
const { responseText } = xhr
// Always process the final line
const lastIndex = responseText.lastIndexOf('\n')
let chunk = responseText
if (lastIndex !== -1)
chunk = responseText.substring(lastIndex)
try {
const data = JSON.parse(chunk)
updateChat(
+uuid,
index,
{
dateTime: new Date().toLocaleString(),
text: data.text ?? '',
inversion: false,
error: false,
loading: false,
conversationOptions: { conversationId: data.conversationId, parentMessageId: data.id },
requestOptions: { prompt: message, ...options },
},
)
}
catch (error) {
//
}
},
})
let lastText = ''
const fetchChatAPIOnce = async () => {
await fetchChatAPIProcess<Chat.ConversationResponse>({
prompt: message,
options,
signal: controller.signal,
onDownloadProgress: ({ event }) => {
const xhr = event.target
const { responseText } = xhr
// Always process the final line
const lastIndex = responseText.lastIndexOf('\n')
let chunk = responseText
if (lastIndex !== -1)
chunk = responseText.substring(lastIndex)
try {
const data = JSON.parse(chunk)
updateChat(
+uuid,
index,
{
dateTime: new Date().toLocaleString(),
text: lastText + data.text ?? '',
inversion: false,
error: false,
loading: false,
conversationOptions: { conversationId: data.conversationId, parentMessageId: data.id },
requestOptions: { prompt: message, ...options },
},
)

if (openLongReply && data.detail.choices[0].finish_reason === 'length') {
options.parentMessageId = data.id
lastText = data.text
message = ''
return fetchChatAPIOnce()
}
}
catch (error) {
//
}
},
})
}
await fetchChatAPIOnce()
}
catch (error: any) {
if (error.message === 'canceled') {
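
Both onConversation and onRegenerate now wrap the streaming request in a recursive fetchChatAPIOnce: when the flag is on and finish_reason is 'length', the text received so far is kept in lastText, options.parentMessageId is advanced to the truncated message's id, the prompt is cleared, and the request is re-issued. A condensed sketch of that control flow, written as a loop instead of recursion, with the hypothetical helpers sendOnce and render standing in for fetchChatAPIProcess and updateChat (streaming progress is omitted for brevity):

```ts
// Sketch only: sendOnce and render are placeholders for the component's
// fetchChatAPIProcess and updateChat calls; streaming is left out.
interface ChunkResult {
  id: string
  text: string
  finishReason?: string // taken from detail.choices[0].finish_reason
}

async function requestLongReply(
  sendOnce: (prompt: string, parentMessageId?: string) => Promise<ChunkResult>,
  render: (text: string) => void,
  prompt: string,
  openLongReply: boolean,
): Promise<void> {
  let accumulated = ''
  let message = prompt
  let parentMessageId: string | undefined

  // Keep asking the model to continue while replies are cut off by the token limit.
  while (true) {
    const data = await sendOnce(message, parentMessageId)
    accumulated += data.text
    render(accumulated)

    if (!(openLongReply && data.finishReason === 'length'))
      break

    parentMessageId = data.id // continue from the truncated answer
    message = '' // an empty prompt means "continue"
  }
}
```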