perf: optimized output (#962)
* Update index.ts
* Update index.vue
* Update index.ts
parent c0a9fd5208
commit 78bcf7f4ce
@@ -25,12 +25,21 @@ router.post('/chat-process', [auth, limiter], async (req, res) => {
   try {
     const { prompt, options = {}, systemMessage } = req.body as RequestProps
     let firstChunk = true
+    let chatLength = 0
+    let newChatLength = 0
     await chatReplyProcess({
       message: prompt,
       lastContext: options,
       process: (chat: ChatMessage) => {
-        res.write(firstChunk ? JSON.stringify(chat) : `\n${JSON.stringify(chat)}`)
+        if (firstChunk) {
+          res.write(`${JSON.stringify(chat)}t1h1i4s5i1s4a1s9i1l9l8y1s0plit`)
           firstChunk = false
+        }
+        else if (chatLength !== chat.text.length) {
+          newChatLength = chat.text.length
+          res.write(chat.text.substring(chatLength, newChatLength))
+          chatLength = newChatLength
+        }
       },
       systemMessage,
     })
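The route change above replaces the old one-JSON-object-per-line stream with a two-part format: the first process callback writes the complete ChatMessage JSON followed by the t1h1i4s5i1s4a1s9i1l9l8y1s0plit marker, and every later callback writes only the characters appended to chat.text since the previous write. A minimal standalone sketch of that logic (createStreamWriter and ChatMessageLike are illustrative names, not part of the diff; write stands in for res.write):

// Sketch only: restates the delta-writing logic of the route above.
const MAGIC_SPLIT = 't1h1i4s5i1s4a1s9i1l9l8y1s0plit'

interface ChatMessageLike {
  text: string
}

function createStreamWriter(write: (chunk: string) => void) {
  let firstChunk = true
  let chatLength = 0

  return (chat: ChatMessageLike) => {
    if (firstChunk) {
      // First callback: the whole JSON payload, terminated by the marker.
      write(`${JSON.stringify(chat)}${MAGIC_SPLIT}`)
      firstChunk = false
    }
    else if (chatLength !== chat.text.length) {
      // Later callbacks: only the newly appended slice of the text.
      // chatLength is still 0 on the first delta, so any text already contained
      // in the JSON payload is repeated after the marker.
      write(chat.text.substring(chatLength, chat.text.length))
      chatLength = chat.text.length
    }
  }
}

For callbacks carrying the cumulative texts 'Hel', 'Hello' and 'Hello world', the socket would receive the JSON payload plus the marker, then 'Hello', then ' world', so the bytes after the marker always add up to the complete reply text.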
@@ -107,7 +107,9 @@ async function onConversation() {
   scrollToBottom()

   try {
-    let lastText = ''
+    const magicSplit = 't1h1i4s5i1s4a1s9i1l9l8y1s0plit'
+    let renderText = ''
+    let firstTime = true
    const fetchChatAPIOnce = async () => {
       await fetchChatAPIProcess<Chat.ConversationResponse>({
         prompt: message,
@@ -117,42 +119,49 @@ async function onConversation() {
           const xhr = event.target
           const { responseText } = xhr
-          // Always process the final line
-          const lastIndex = responseText.lastIndexOf('\n', responseText.length - 2)
-          let chunk = responseText
-          if (lastIndex !== -1)
-            chunk = responseText.substring(lastIndex)
+
+          const splitIndexBegin = responseText.search(magicSplit)
+          if (splitIndexBegin !== -1) {
+            const splitIndexEnd = splitIndexBegin + magicSplit.length
+
+            const firstChunk = responseText.substring(0, splitIndexBegin)
+            const deltaText = responseText.substring(splitIndexEnd)
             try {
-              const data = JSON.parse(chunk)
+              const data = JSON.parse(firstChunk)
+              if (firstTime) {
+                firstTime = false
+                renderText = data.text ?? ''
+              }
+              else {
+                renderText = deltaText ?? ''
+              }
               updateChat(
                 +uuid,
                 dataSources.value.length - 1,
                 {
                   dateTime: new Date().toLocaleString(),
-                  text: lastText + data.text ?? '',
+                  text: renderText,
                   inversion: false,
                   error: false,
                   loading: false,
                   conversationOptions: { conversationId: data.conversationId, parentMessageId: data.id },
-                  requestOptions: { prompt: message, options: { ...options } },
+                  requestOptions: { prompt: message, ...options },
                 },
               )

               if (openLongReply && data.detail.choices[0].finish_reason === 'length') {
                 options.parentMessageId = data.id
-                lastText = data.text
                 message = ''
                 return fetchChatAPIOnce()
               }

               scrollToBottomIfAtBottom()
             }
             catch (error) {
               //
             }
+          }
         },
       })
     }

     await fetchChatAPIOnce()
   }
   catch (error: any) {
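On the client, onDownloadProgress no longer re-parses the last newline-delimited JSON line. It searches the accumulated responseText for the marker, parses everything before it once as the message metadata, and renders everything after it (data.text is only used on the first event, before any delta has arrived). A condensed sketch of that split, assuming the accumulated XHR buffer as input; parseStream and its return shape are illustrative, and indexOf is used where the diff calls String.prototype.search, which coerces the marker string into a RegExp:

// Sketch only: the client-side split performed inside onDownloadProgress.
const magicSplit = 't1h1i4s5i1s4a1s9i1l9l8y1s0plit'

interface StreamHead {
  id?: string
  text?: string
  conversationId?: string
}

function parseStream(responseText: string): { head: StreamHead | null; renderText: string } {
  const splitIndexBegin = responseText.indexOf(magicSplit)
  if (splitIndexBegin === -1)
    return { head: null, renderText: '' }

  const splitIndexEnd = splitIndexBegin + magicSplit.length
  // Before the marker: the one-off JSON payload; after it: the concatenated text deltas.
  const head = JSON.parse(responseText.substring(0, splitIndexBegin)) as StreamHead
  const renderText = responseText.substring(splitIndexEnd)
  return { head, renderText }
}

A caller would then render roughly renderText || head?.text || '' (the diff's firstTime branch) and pass head.conversationId and head.id on to conversationOptions. Incidentally, the removed text: lastText + data.text ?? '' parsed as (lastText + data.text) ?? '', so its fallback never applied; text: renderText sidesteps that.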
@@ -237,7 +246,9 @@ async function onRegenerate(index: number) {
   )

   try {
-    let lastText = ''
+    const magicSplit = 't1h1i4s5i1s4a1s9i1l9l8y1s0plit'
+    let renderText = ''
+    let firstTime = true
     const fetchChatAPIOnce = async () => {
       await fetchChatAPIProcess<Chat.ConversationResponse>({
         prompt: message,
@@ -247,18 +258,28 @@ async function onRegenerate(index: number) {
           const xhr = event.target
           const { responseText } = xhr
-          // Always process the final line
-          const lastIndex = responseText.lastIndexOf('\n', responseText.length - 2)
-          let chunk = responseText
-          if (lastIndex !== -1)
-            chunk = responseText.substring(lastIndex)
+
+          const splitIndexBegin = responseText.search(magicSplit)
+          if (splitIndexBegin !== -1) {
+            const splitIndexEnd = splitIndexBegin + magicSplit.length
+
+            const firstChunk = responseText.substring(0, splitIndexBegin)
+            const deltaText = responseText.substring(splitIndexEnd)
             try {
-              const data = JSON.parse(chunk)
+              const data = JSON.parse(firstChunk)
+              if (firstTime) {
+                firstTime = false
+                renderText = data.text ?? ''
+              }
+              else {
+                renderText = deltaText ?? ''
+              }
               updateChat(
                 +uuid,
                 index,
                 {
                   dateTime: new Date().toLocaleString(),
-                  text: lastText + data.text ?? '',
+                  text: renderText,
                   inversion: false,
                   error: false,
                   loading: false,
@@ -269,7 +290,6 @@ async function onRegenerate(index: number) {

               if (openLongReply && data.detail.choices[0].finish_reason === 'length') {
                 options.parentMessageId = data.id
-                lastText = data.text
                 message = ''
                 return fetchChatAPIOnce()
               }
@@ -277,6 +297,7 @@ async function onRegenerate(index: number) {
             catch (error) {
               //
             }
+          }
         },
       })
     }
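onRegenerate mirrors onConversation with the same marker parsing, writing into the message at the given index instead of the last element. Both handlers keep the long-reply path: when openLongReply is enabled and the model stops with finish_reason === 'length', they store data.id as the next parentMessageId, clear the prompt and call fetchChatAPIOnce() again. The diff expresses that as a recursive call; the loop below is an equivalent standalone sketch, with fetchOnce and Head as illustrative stand-ins for one streamed request and its parsed JSON payload:

// Sketch only: the long-reply continuation pattern shared by both handlers.
interface Head {
  id: string
  text: string
  finishReason?: string // stands in for data.detail.choices[0].finish_reason
}

async function fetchUntilComplete(fetchOnce: (parentMessageId?: string) => Promise<Head>): Promise<Head> {
  let head = await fetchOnce()
  // While the reply was cut off by the token limit, ask again with the last
  // message id as the parent so the model continues where it stopped.
  while (head.finishReason === 'length')
    head = await fetchOnce(head.id)
  return head
}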
@@ -467,20 +488,13 @@ onUnmounted(() => {
 <template>
   <div class="flex flex-col w-full h-full">
     <HeaderComponent
-      v-if="isMobile"
-      :using-context="usingContext"
-      @export="handleExport"
+      v-if="isMobile" :using-context="usingContext" @export="handleExport"
       @toggle-using-context="toggleUsingContext"
     />
     <main class="flex-1 overflow-hidden">
-      <div
-        id="scrollRef"
-        ref="scrollRef"
-        class="h-full overflow-hidden overflow-y-auto"
-      >
+      <div id="scrollRef" ref="scrollRef" class="h-full overflow-hidden overflow-y-auto">
         <div
-          id="image-wrapper"
-          class="w-full max-w-screen-xl m-auto dark:bg-[#101014]"
+          id="image-wrapper" class="w-full max-w-screen-xl m-auto dark:bg-[#101014]"
           :class="[isMobile ? 'p-2' : 'p-4']"
         >
           <template v-if="!dataSources.length">
@@ -492,14 +506,8 @@ onUnmounted(() => {
           <template v-else>
             <div>
               <Message
-                v-for="(item, index) of dataSources"
-                :key="index"
-                :date-time="item.dateTime"
-                :text="item.text"
-                :inversion="item.inversion"
-                :error="item.error"
-                :loading="item.loading"
-                @regenerate="onRegenerate(index)"
+                v-for="(item, index) of dataSources" :key="index" :date-time="item.dateTime" :text="item.text"
+                :inversion="item.inversion" :error="item.error" :loading="item.loading" @regenerate="onRegenerate(index)"
                 @delete="handleDelete(index)"
               />
               <div class="sticky bottom-0 left-0 flex justify-center">
@@ -536,15 +544,9 @@ onUnmounted(() => {
         <NAutoComplete v-model:value="prompt" :options="searchOptions" :render-label="renderOption">
           <template #default="{ handleInput, handleBlur, handleFocus }">
             <NInput
-              ref="inputRef"
-              v-model:value="prompt"
-              type="textarea"
-              :placeholder="placeholder"
-              :autosize="{ minRows: 1, maxRows: isMobile ? 4 : 8 }"
-              @input="handleInput"
-              @focus="handleFocus"
-              @blur="handleBlur"
-              @keypress="handleEnter"
+              ref="inputRef" v-model:value="prompt" type="textarea" :placeholder="placeholder"
+              :autosize="{ minRows: 1, maxRows: isMobile ? 4 : 8 }" @input="handleInput" @focus="handleFocus"
+              @blur="handleBlur" @keypress="handleEnter"
             />
           </template>
         </NAutoComplete>