feat: support long reply (#450)

* chore: rename environment variable files

* docs: update README.md about .env file

* feat: support long reply

* chore: upgrade chatgpt package and set long reply to false by default

* chore: set long reply to false by default
Author: Yige
Date:   2023-03-10 13:23:22 +08:00 (committed by GitHub)
Parent: 133a24e25f
Commit: 076c56d1d9

3 changed files with 99 additions and 70 deletions

.env

@@ -2,3 +2,6 @@
 VITE_GLOB_API_URL=/api
 
 VITE_APP_API_BASE_URL=http://localhost:3002/
+
+# Whether long replies are supported, which may result in higher API fees
+VITE_GLOB_OPEN_LONG_REPLY=false
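
A deployment opts in by setting VITE_GLOB_OPEN_LONG_REPLY=true in its .env. Because Vite exposes env values to the client as strings, the feature check has to compare against the literal string 'true'; a minimal sketch of the check this commit adds to the chat view:

    // Vite env values are strings: anything other than the exact string 'true' leaves long replies off.
    const openLongReply = import.meta.env.VITE_GLOB_OPEN_LONG_REPLY === 'true'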


@@ -87,8 +87,8 @@ async function chatReplyProcess(
   lastContext?: { conversationId?: string; parentMessageId?: string },
   process?: (chat: ChatMessage) => void,
 ) {
-  if (!message)
-    return sendResponse({ type: 'Fail', message: 'Message is empty' })
+  // if (!message)
+  //   return sendResponse({ type: 'Fail', message: 'Message is empty' })
   try {
     let options: SendMessageOptions = { timeoutMs }
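
The empty-message guard is commented out because a long-reply continuation intentionally sends an empty prompt: the client supplies only the parentMessageId of the truncated answer and lets the model keep writing. A rough sketch of what such a continuation round carries, using illustrative names rather than the service's exact types (lastResponseId stands for the data.id captured from the previous round):

    // Illustrative only: the shape of a continuation round after a truncated reply.
    function buildContinuation(lastResponseId: string) {
      return {
        prompt: '', // intentionally empty — hence the relaxed guard above
        options: { parentMessageId: lastResponseId },
      }
    }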


@@ -15,6 +15,8 @@ import { t } from '@/locales'
 let controller = new AbortController()
 
+const openLongReply = import.meta.env.VITE_GLOB_OPEN_LONG_REPLY === 'true'
+
 const route = useRoute()
 const dialog = useDialog()
 const ms = useMessage()

@@ -41,7 +43,7 @@ function handleSubmit() {
 }
 
 async function onConversation() {
-  const message = prompt.value
+  let message = prompt.value
 
   if (loading.value)
     return

@ -88,40 +90,53 @@ async function onConversation() {
scrollToBottom() scrollToBottom()
try { try {
await fetchChatAPIProcess<Chat.ConversationResponse>({ let lastText = ''
prompt: message, const fetchChatAPIOnce = async () => {
options, await fetchChatAPIProcess<Chat.ConversationResponse>({
signal: controller.signal, prompt: message,
onDownloadProgress: ({ event }) => { options,
const xhr = event.target signal: controller.signal,
const { responseText } = xhr onDownloadProgress: ({ event }) => {
// Always process the final line const xhr = event.target
const lastIndex = responseText.lastIndexOf('\n') const { responseText } = xhr
let chunk = responseText // Always process the final line
if (lastIndex !== -1) const lastIndex = responseText.lastIndexOf('\n')
chunk = responseText.substring(lastIndex) let chunk = responseText
try { if (lastIndex !== -1)
const data = JSON.parse(chunk) chunk = responseText.substring(lastIndex)
updateChat( try {
+uuid, const data = JSON.parse(chunk)
dataSources.value.length - 1, updateChat(
{ +uuid,
dateTime: new Date().toLocaleString(), dataSources.value.length - 1,
text: data.text ?? '', {
inversion: false, dateTime: new Date().toLocaleString(),
error: false, text: lastText + data.text ?? '',
loading: false, inversion: false,
conversationOptions: { conversationId: data.conversationId, parentMessageId: data.id }, error: false,
requestOptions: { prompt: message, options: { ...options } }, loading: false,
}, conversationOptions: { conversationId: data.conversationId, parentMessageId: data.id },
) requestOptions: { prompt: message, options: { ...options } },
scrollToBottom() },
} )
catch (error) {
if (openLongReply && data.detail.choices[0].finish_reason === 'length') {
options.parentMessageId = data.id
lastText = data.text
message = ''
return fetchChatAPIOnce()
}
scrollToBottom()
}
catch (error) {
// //
} }
}, },
}) })
}
await fetchChatAPIOnce()
} }
catch (error: any) { catch (error: any) {
const errorMessage = error?.message ?? t('common.wrong') const errorMessage = error?.message ?? t('common.wrong')
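
The heart of the change is the fetchChatAPIOnce wrapper: when openLongReply is set and a response ends with finish_reason === 'length', the handler re-issues the request with an empty prompt, points options.parentMessageId at the truncated reply, and carries the text received so far in lastText. The diff does this recursively from inside the streaming callback; the same idea written as a self-contained loop, with an illustrative send callback standing in for fetchChatAPIProcess (none of these names are the app's API), looks roughly like this:

    // Sketch only: keep requesting continuations until the model stops for a reason other than the token limit.
    interface ChunkLike {
      id: string
      text: string
      detail: { choices: { finish_reason?: string }[] }
    }

    async function fetchLongReply(
      send: (prompt: string, parentMessageId?: string) => Promise<ChunkLike>,
      firstPrompt: string,
    ): Promise<string> {
      let lastText = ''
      let prompt = firstPrompt
      let parentMessageId: string | undefined

      while (true) {
        const data = await send(prompt, parentMessageId)
        lastText += data.text
        if (data.detail.choices[0].finish_reason !== 'length')
          return lastText           // finished normally (or was cut off for another reason)
        parentMessageId = data.id   // continue the same answer
        prompt = ''                 // no new user text on a continuation round
      }
    }

One detail worth noting: the diff's lastText + data.text ?? '' parses as (lastText + data.text) ?? '' because + binds tighter than ??, so the ?? fallback never applies once lastText is a string; the accumulated text still renders correctly as long as data.text is defined.
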
@@ -181,7 +196,7 @@ async function onRegenerate(index: number) {
   const { requestOptions } = dataSources.value[index]
 
-  const message = requestOptions?.prompt ?? ''
+  let message = requestOptions?.prompt ?? ''
 
   let options: Chat.ConversationRequest = {}

@@ -205,39 +220,50 @@ async function onRegenerate(index: number) {
   )
 
   try {
-    await fetchChatAPIProcess<Chat.ConversationResponse>({
-      prompt: message,
-      options,
-      signal: controller.signal,
-      onDownloadProgress: ({ event }) => {
-        const xhr = event.target
-        const { responseText } = xhr
-        // Always process the final line
-        const lastIndex = responseText.lastIndexOf('\n')
-        let chunk = responseText
-        if (lastIndex !== -1)
-          chunk = responseText.substring(lastIndex)
-        try {
-          const data = JSON.parse(chunk)
-          updateChat(
-            +uuid,
-            index,
-            {
-              dateTime: new Date().toLocaleString(),
-              text: data.text ?? '',
-              inversion: false,
-              error: false,
-              loading: false,
-              conversationOptions: { conversationId: data.conversationId, parentMessageId: data.id },
-              requestOptions: { prompt: message, ...options },
-            },
-          )
-        }
-        catch (error) {
-          //
-        }
-      },
-    })
+    let lastText = ''
+    const fetchChatAPIOnce = async () => {
+      await fetchChatAPIProcess<Chat.ConversationResponse>({
+        prompt: message,
+        options,
+        signal: controller.signal,
+        onDownloadProgress: ({ event }) => {
+          const xhr = event.target
+          const { responseText } = xhr
+          // Always process the final line
+          const lastIndex = responseText.lastIndexOf('\n')
+          let chunk = responseText
+          if (lastIndex !== -1)
+            chunk = responseText.substring(lastIndex)
+          try {
+            const data = JSON.parse(chunk)
+            updateChat(
+              +uuid,
+              index,
+              {
+                dateTime: new Date().toLocaleString(),
+                text: lastText + data.text ?? '',
+                inversion: false,
+                error: false,
+                loading: false,
+                conversationOptions: { conversationId: data.conversationId, parentMessageId: data.id },
+                requestOptions: { prompt: message, ...options },
+              },
+            )
+
+            if (openLongReply && data.detail.choices[0].finish_reason === 'length') {
+              options.parentMessageId = data.id
+              lastText = data.text
+              message = ''
+              return fetchChatAPIOnce()
+            }
+          }
+          catch (error) {
+            //
+          }
+        },
+      })
+    }
+    await fetchChatAPIOnce()
   }
   catch (error: any) {
     if (error.message === 'canceled') {
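
Both call sites rely on the same fields of each streamed chunk: text for the incremental reply, id (fed back as parentMessageId on continuation), conversationId, and the finish_reason nested under detail.choices[0]. A rough, illustrative shape of what the handlers above assume (the real Chat.ConversationResponse type lives in the project's typings and may differ):

    // Illustrative only — the fields the long-reply logic actually reads from each streamed chunk.
    interface ConversationChunkShape {
      id: string                // next round's options.parentMessageId
      conversationId?: string   // kept in conversationOptions for the chat history
      text: string              // incremental reply text, accumulated via lastText
      detail: {
        choices: { finish_reason?: string }[]   // 'length' means the token limit was hit
      }
    }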