From 982cab6cae8e203b13e28c38bf46df6f145e6879 Mon Sep 17 00:00:00 2001
From: Yige
Date: Fri, 10 Mar 2023 13:23:22 +0800
Subject: [PATCH] feat: support long reply (#450)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* chore: rename environment variables files

* docs: update README.md about .env file

* feat: support long reply

* chore: upgrade chatgpt package and set long reply to false default

* chore: set long reply to false default
---
 service/src/chatgpt/index.ts |   4 +-
 src/views/chat/index.vue     | 162 ++++++++++++++++++++---------------
 2 files changed, 96 insertions(+), 70 deletions(-)

diff --git a/service/src/chatgpt/index.ts b/service/src/chatgpt/index.ts
index a9eecd2..9ddebaf 100644
--- a/service/src/chatgpt/index.ts
+++ b/service/src/chatgpt/index.ts
@@ -87,8 +87,8 @@ async function chatReplyProcess(
   lastContext?: { conversationId?: string; parentMessageId?: string },
   process?: (chat: ChatMessage) => void,
 ) {
-  if (!message)
-    return sendResponse({ type: 'Fail', message: 'Message is empty' })
+  // if (!message)
+  //   return sendResponse({ type: 'Fail', message: 'Message is empty' })
 
   try {
     let options: SendMessageOptions = { timeoutMs }
diff --git a/src/views/chat/index.vue b/src/views/chat/index.vue
index 1eeefd2..754ac7c 100644
--- a/src/views/chat/index.vue
+++ b/src/views/chat/index.vue
@@ -15,6 +15,8 @@ import { t } from '@/locales'
 
 let controller = new AbortController()
 
+const openLongReply = import.meta.env.VITE_GLOB_OPEN_LONG_REPLY === 'true'
+
 const route = useRoute()
 const dialog = useDialog()
 const ms = useMessage()
@@ -41,7 +43,7 @@ function handleSubmit() {
 }
 
 async function onConversation() {
-  const message = prompt.value
+  let message = prompt.value
 
   if (loading.value)
     return
@@ -88,40 +90,53 @@ async function onConversation() {
   scrollToBottom()
 
   try {
-    await fetchChatAPIProcess({
-      prompt: message,
-      options,
-      signal: controller.signal,
-      onDownloadProgress: ({ event }) => {
-        const xhr = event.target
-        const { responseText } = xhr
-        // Always process the final line
-        const lastIndex = responseText.lastIndexOf('\n')
-        let chunk = responseText
-        if (lastIndex !== -1)
-          chunk = responseText.substring(lastIndex)
-        try {
-          const data = JSON.parse(chunk)
-          updateChat(
-            +uuid,
-            dataSources.value.length - 1,
-            {
-              dateTime: new Date().toLocaleString(),
-              text: data.text ?? '',
-              inversion: false,
-              error: false,
-              loading: false,
-              conversationOptions: { conversationId: data.conversationId, parentMessageId: data.id },
-              requestOptions: { prompt: message, options: { ...options } },
-            },
-          )
-          scrollToBottom()
-        }
-        catch (error) {
+    let lastText = ''
+    const fetchChatAPIOnce = async () => {
+      await fetchChatAPIProcess({
+        prompt: message,
+        options,
+        signal: controller.signal,
+        onDownloadProgress: ({ event }) => {
+          const xhr = event.target
+          const { responseText } = xhr
+          // Always process the final line
+          const lastIndex = responseText.lastIndexOf('\n')
+          let chunk = responseText
+          if (lastIndex !== -1)
+            chunk = responseText.substring(lastIndex)
+          try {
+            const data = JSON.parse(chunk)
+            updateChat(
+              +uuid,
+              dataSources.value.length - 1,
+              {
+                dateTime: new Date().toLocaleString(),
+                text: lastText + data.text ?? '',
+                inversion: false,
+                error: false,
+                loading: false,
+                conversationOptions: { conversationId: data.conversationId, parentMessageId: data.id },
+                requestOptions: { prompt: message, options: { ...options } },
+              },
+            )
+
+            if (openLongReply && data.detail.choices[0].finish_reason === 'length') {
+              options.parentMessageId = data.id
+              lastText = data.text
+              message = ''
+              return fetchChatAPIOnce()
+            }
+
+            scrollToBottom()
+          }
+          catch (error) {
             //
-        }
-      },
-    })
+          }
+        },
+      })
+    }
+
+    await fetchChatAPIOnce()
   }
   catch (error: any) {
     const errorMessage = error?.message ?? t('common.wrong')
@@ -181,7 +196,7 @@ async function onRegenerate(index: number) {
 
   const { requestOptions } = dataSources.value[index]
 
-  const message = requestOptions?.prompt ?? ''
+  let message = requestOptions?.prompt ?? ''
 
   let options: Chat.ConversationRequest = {}
 
@@ -205,39 +220,50 @@ async function onRegenerate(index: number) {
   )
 
   try {
-    await fetchChatAPIProcess({
-      prompt: message,
-      options,
-      signal: controller.signal,
-      onDownloadProgress: ({ event }) => {
-        const xhr = event.target
-        const { responseText } = xhr
-        // Always process the final line
-        const lastIndex = responseText.lastIndexOf('\n')
-        let chunk = responseText
-        if (lastIndex !== -1)
-          chunk = responseText.substring(lastIndex)
-        try {
-          const data = JSON.parse(chunk)
-          updateChat(
-            +uuid,
-            index,
-            {
-              dateTime: new Date().toLocaleString(),
-              text: data.text ?? '',
-              inversion: false,
-              error: false,
-              loading: false,
-              conversationOptions: { conversationId: data.conversationId, parentMessageId: data.id },
-              requestOptions: { prompt: message, ...options },
-            },
-          )
-        }
-        catch (error) {
-          //
-        }
-      },
-    })
+    let lastText = ''
+    const fetchChatAPIOnce = async () => {
+      await fetchChatAPIProcess({
+        prompt: message,
+        options,
+        signal: controller.signal,
+        onDownloadProgress: ({ event }) => {
+          const xhr = event.target
+          const { responseText } = xhr
+          // Always process the final line
+          const lastIndex = responseText.lastIndexOf('\n')
+          let chunk = responseText
+          if (lastIndex !== -1)
+            chunk = responseText.substring(lastIndex)
+          try {
+            const data = JSON.parse(chunk)
+            updateChat(
+              +uuid,
+              index,
+              {
+                dateTime: new Date().toLocaleString(),
+                text: lastText + data.text ?? '',
+                inversion: false,
+                error: false,
+                loading: false,
+                conversationOptions: { conversationId: data.conversationId, parentMessageId: data.id },
+                requestOptions: { prompt: message, ...options },
+              },
+            )
+
+            if (openLongReply && data.detail.choices[0].finish_reason === 'length') {
+              options.parentMessageId = data.id
+              lastText = data.text
+              message = ''
+              return fetchChatAPIOnce()
+            }
+          }
+          catch (error) {
+            //
+          }
+        },
+      })
+    }
+    await fetchChatAPIOnce()
   }
   catch (error: any) {
     if (error.message === 'canceled') {
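
Review note: the diff above implements "long reply" by watching the streamed result for finish_reason === 'length', then re-issuing the request with an empty prompt and options.parentMessageId pointing at the truncated chunk so the model continues where it stopped, keeping the previous text in lastText and prepending it when rendering. The TypeScript sketch below restates that control flow in isolation, under stated assumptions: requestChunk is a hypothetical stand-in for the project's fetchChatAPIProcess transport (it resolves with the final parsed chunk instead of streaming), a loop replaces the patch's recursion, the sketch accumulates text across every round (the patch keeps only the most recent chunk in lastText), and the nullish fallback is scoped to data.text alone, because in the patch's expression lastText + data.text ?? '' the ?? applies to the whole concatenation, so a missing data.text would render as the string "undefined".

// Sketch only, not the patch's code: the long-reply continuation loop.
interface ChunkResult {
  id: string
  text?: string
  conversationId?: string
  detail?: { choices: Array<{ finish_reason?: string }> }
}

// Hypothetical transport standing in for fetchChatAPIProcess: sends one
// prompt in an existing message chain and resolves with the final chunk.
type RequestChunk = (
  prompt: string,
  options: { parentMessageId?: string },
) => Promise<ChunkResult>

async function fetchLongReply(
  prompt: string,
  requestChunk: RequestChunk,
  openLongReply: boolean,
): Promise<string> {
  const options: { parentMessageId?: string } = {}
  let message = prompt
  let lastText = ''

  while (true) {
    const data = await requestChunk(message, options)
    // Append this round's text; the fallback covers only data.text.
    lastText += data.text ?? ''

    // Optional chaining here; the patch reads data.detail.choices[0] directly
    // and relies on the surrounding try/catch if detail is missing.
    const finishReason = data.detail?.choices?.[0]?.finish_reason
    if (openLongReply && finishReason === 'length') {
      // The reply hit the token limit: continue the same message chain with
      // an empty prompt so the model picks up where it stopped.
      options.parentMessageId = data.id
      message = ''
      continue
    }
    return lastText
  }
}

A caller would wrap the real streaming request so it resolves with the last parsed chunk, then invoke fetchLongReply(userPrompt, wrappedRequest, import.meta.env.VITE_GLOB_OPEN_LONG_REPLY === 'true'), mirroring the openLongReply flag the patch introduces.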