From c26be2e88561f011cb408b531b6177e11ec7c2db Mon Sep 17 00:00:00 2001
From: archer <545436317@qq.com>
Date: Mon, 7 Aug 2023 13:18:45 +0800
Subject: [PATCH] perf: chat completion api

---
 client/src/api/fetch.ts                       |  1 +
 client/src/pages/api/chat/chatTest.ts         |  3 +-
 .../pages/api/openapi/v1/chat/completions.ts  | 29 ++++++++++++++-----
 .../src/service/moduleDispatch/chat/oneapi.ts | 20 +++++++++++--
 .../service/moduleDispatch/tools/answer.ts    |  5 ++--
 client/src/utils/adapt.ts                     |  6 ++--
 6 files changed, 47 insertions(+), 17 deletions(-)

diff --git a/client/src/api/fetch.ts b/client/src/api/fetch.ts
index 08b70d0e0..9fe0ec182 100644
--- a/client/src/api/fetch.ts
+++ b/client/src/api/fetch.ts
@@ -31,6 +31,7 @@ export const streamFetch = ({
     signal: abortSignal.signal,
     body: JSON.stringify({
       ...data,
+      detail: true,
       stream: true
     })
   });
diff --git a/client/src/pages/api/chat/chatTest.ts b/client/src/pages/api/chat/chatTest.ts
index 2f3624b14..4817e2257 100644
--- a/client/src/pages/api/chat/chatTest.ts
+++ b/client/src/pages/api/chat/chatTest.ts
@@ -54,7 +54,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
       history: gptMessage2ChatType(history),
       userChatInput: prompt
     },
-    stream: true
+    stream: true,
+    detail: true
   });

   sseResponse({
diff --git a/client/src/pages/api/openapi/v1/chat/completions.ts b/client/src/pages/api/openapi/v1/chat/completions.ts
index 15480647c..884f2a1f9 100644
--- a/client/src/pages/api/openapi/v1/chat/completions.ts
+++ b/client/src/pages/api/openapi/v1/chat/completions.ts
@@ -41,6 +41,7 @@ export type Props = CreateChatCompletionRequest &
   FastGptShareChatProps & {
     messages: MessageItemType[];
     stream?: boolean;
+    detail?: boolean;
     variables: Record<string, any>;
   };
 export type ChatResponseType = {
@@ -57,7 +58,15 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
     res.end();
   });

-  let { chatId, appId, shareId, stream = false, messages = [], variables = {} } = req.body as Props;
+  let {
+    chatId,
+    appId,
+    shareId,
+    stream = false,
+    detail = false,
+    messages = [],
+    variables = {}
+  } = req.body as Props;

   try {
     if (!messages) {
@@ -133,7 +142,8 @@
         history: prompts,
         userChatInput: prompt.value
       },
-      stream
+      stream,
+      detail
     });

     // console.log(responseData, '===', answerText);
@@ -176,7 +186,7 @@
     if (stream) {
       sseResponse({
         res,
-        event: sseResponseEventEnum.answer,
+        event: detail ? sseResponseEventEnum.answer : undefined,
         data: textAdaptGptResponse({
           text: null,
           finish_reason: 'stop'
         })
       });
       sseResponse({
         res,
-        event: sseResponseEventEnum.answer,
+        event: detail ? sseResponseEventEnum.answer : undefined,
         data: '[DONE]'
       });

-      if (isOwner) {
+      if (isOwner && detail) {
         sseResponse({
           res,
           event: sseResponseEventEnum.appStreamResponse,
@@ -199,7 +209,7 @@
       res.end();
     } else {
       res.json({
-        responseData,
+        ...(detail ? { responseData } : {}),
         id: chatId || '',
         model: '',
         usage: { prompt_tokens: 1, completion_tokens: 1, total_tokens: 1 },
@@ -244,7 +254,8 @@ export async function dispatchModules({
   user,
   params = {},
   variables = {},
-  stream = false
+  stream = false,
+  detail = false
 }: {
   res: NextApiResponse;
   modules: AppModuleItemType[];
   params?: Record<string, any>;
   variables?: Record<string, any>;
   stream?: boolean;
+  detail?: boolean;
 }) {
   const runningModules = loadModules(modules, variables);
@@ -322,7 +334,7 @@
       if (res.closed) return Promise.resolve();
       console.log('run=========', module.flowType);

-      if (stream && module.showStatus) {
+      if (stream && detail && module.showStatus) {
         responseStatus({
           res,
           name: module.name,
@@ -338,6 +350,7 @@
       const props: Record<string, any> = {
         res,
         stream,
+        detail,
         userOpenaiAccount: user?.openaiAccount,
         ...params
       };
diff --git a/client/src/service/moduleDispatch/chat/oneapi.ts b/client/src/service/moduleDispatch/chat/oneapi.ts
index e0b72a998..c993eba84 100644
--- a/client/src/service/moduleDispatch/chat/oneapi.ts
+++ b/client/src/service/moduleDispatch/chat/oneapi.ts
@@ -25,6 +25,7 @@ export type ChatProps = {
   history?: ChatItemType[];
   userChatInput: string;
   stream?: boolean;
+  detail?: boolean;
   quoteQA?: QuoteItemType[];
   systemPrompt?: string;
   limitPrompt?: string;
@@ -44,6 +45,7 @@ export const dispatchChatCompletion = async (props: Record<string, any>): Promis
     temperature = 0,
     maxToken = 4000,
     stream = false,
+    detail = false,
     history = [],
     quoteQA = [],
     userChatInput,
@@ -111,7 +113,11 @@
   const { answerText, totalTokens, completeMessages } = await (async () => {
     if (stream) {
       // sse response
-      const { answer } = await streamResponse({ res, response });
+      const { answer } = await streamResponse({
+        res,
+        detail,
+        response
+      });
       // count tokens
       const completeMessages = filterMessages.concat({
         obj: ChatRoleEnum.AI,
@@ -282,7 +288,15 @@ function getMaxTokens({
   };
 }

-async function streamResponse({ res, response }: { res: NextApiResponse; response: any }) {
+async function streamResponse({
+  res,
+  detail,
+  response
+}: {
+  res: NextApiResponse;
+  detail: boolean;
+  response: any;
+}) {
   let answer = '';
   let error: any = null;
   const parseData = new SSEParseData();
@@ -301,7 +315,7 @@ async function streamResponse({ res, response }: { res: NextApiResponse; respons
       sseResponse({
         res,
-        event: sseResponseEventEnum.answer,
+        event: detail ? sseResponseEventEnum.answer : undefined,
         data: textAdaptGptResponse({
           text: content
         })
diff --git a/client/src/service/moduleDispatch/tools/answer.ts b/client/src/service/moduleDispatch/tools/answer.ts
index c292b97c7..b061a0134 100644
--- a/client/src/service/moduleDispatch/tools/answer.ts
+++ b/client/src/service/moduleDispatch/tools/answer.ts
@@ -5,6 +5,7 @@ import type { NextApiResponse } from 'next';

 export type AnswerProps = {
   res: NextApiResponse;
+  detail?: boolean;
   text: string;
   stream: boolean;
 };
@@ -13,12 +14,12 @@ export type AnswerResponse = {
 };

 export const dispatchAnswer = (props: Record<string, any>): AnswerResponse => {
-  const { res, text = '', stream } = props as AnswerProps;
+  const { res, detail, text = '', stream } = props as AnswerProps;

   if (stream) {
     sseResponse({
       res,
-      event: sseResponseEventEnum.answer,
+      event: detail ? sseResponseEventEnum.answer : undefined,
       data: textAdaptGptResponse({
         text: text.replace(/\\n/g, '\n')
       })
diff --git a/client/src/utils/adapt.ts b/client/src/utils/adapt.ts
index f38116471..fa6ace9c3 100644
--- a/client/src/utils/adapt.ts
+++ b/client/src/utils/adapt.ts
@@ -6,11 +6,11 @@ import { ChatCompletionRequestMessageRoleEnum } from 'openai';
 import { ChatRoleEnum } from '@/constants/chat';
 import type { MessageItemType } from '@/pages/api/openapi/v1/chat/completions';
 import type { AppModuleItemType } from '@/types/app';
-import type { FlowModuleItemType, FlowModuleTemplateType } from '@/types/flow';
+import type { FlowModuleItemType } from '@/types/flow';
 import type { Edge, Node } from 'reactflow';
 import { connectionLineStyle } from '@/constants/flow';
 import { customAlphabet } from 'nanoid';
-import { EmptyModule, ModuleTemplates, ModuleTemplatesFlat } from '@/constants/flow/ModuleTemplate';
+import { EmptyModule, ModuleTemplatesFlat } from '@/constants/flow/ModuleTemplate';
 const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 6);

 export const adaptBill = (bill: BillSchema): UserBillType => {
@@ -41,7 +41,7 @@ export const gptMessage2ChatType = (messages: MessageItemType[]): ChatItemType[]

 export const textAdaptGptResponse = ({
   text,
-  model,
+  model = '',
   finish_reason = null,
   extraData = {}
 }: {
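
Note (not part of the commit): the net effect of the patch is that `detail: true` makes the server tag each SSE frame with an event name (`answer` for tokens, plus an owner-only `appStreamResponse` dump of module-level results), while `detail: false`, the default, emits untagged OpenAI-style chunks, so existing OpenAI SSE clients keep working; FastGPT's own web client opts in via the `detail: true` added in fetch.ts. Below is a minimal client sketch of both modes. It assumes a Node 18+ runtime (global `fetch` and web streams), that `sseResponseEventEnum.answer`/`appStreamResponse` serialize to the strings `'answer'`/`'appStreamResponse'`, and placeholder `BASE_URL`/`API_KEY` values; none of this code is from the patch itself.

```ts
// Sketch: consume /api/openapi/v1/chat/completions with the new `detail` flag.
// Assumptions: Node 18+; event names 'answer' and 'appStreamResponse';
// FASTGPT_BASE_URL and API_KEY are hypothetical deployment placeholders.
const BASE_URL = process.env.FASTGPT_BASE_URL ?? 'http://localhost:3000';

async function streamChat(prompt: string, detail: boolean): Promise<void> {
  const res = await fetch(`${BASE_URL}/api/openapi/v1/chat/completions`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${process.env.API_KEY ?? ''}`
    },
    body: JSON.stringify({
      stream: true,
      detail, // false (default): plain OpenAI-style chunks; true: named SSE events
      variables: {},
      messages: [{ role: 'user', content: prompt }]
    })
  });
  if (!res.ok || !res.body) throw new Error(`request failed: ${res.status}`);

  const reader = res.body.getReader();
  const decoder = new TextDecoder();
  let buffer = '';
  for (;;) {
    const { done, value } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });
    const frames = buffer.split('\n\n'); // SSE frames end with a blank line
    buffer = frames.pop() ?? '';
    for (const frame of frames) {
      // In detail mode each frame carries an `event:` line before `data:`.
      const event = frame.match(/^event: (.+)$/m)?.[1];
      const data = frame.match(/^data: (.+)$/m)?.[1];
      if (!data || data === '[DONE]') continue;
      if (event === 'appStreamResponse') {
        // Owner-only module responses, sent once before the stream closes.
        console.log('responseData:', JSON.parse(data));
      } else {
        process.stdout.write(JSON.parse(data).choices?.[0]?.delta?.content ?? '');
      }
    }
  }
}

streamChat('hello', true).catch(console.error);
```

The same parser handles both modes because it only branches on the optional `event:` line, which matches the server-side change of passing `event: undefined` when `detail` is false.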