From 8a02b3b04a296e29137e83ad1ef5913a01ce142d Mon Sep 17 00:00:00 2001
From: archer <545436317@qq.com>
Date: Thu, 6 Apr 2023 11:42:47 +0800
Subject: [PATCH] perf: extract the response stream handling
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/pages/api/chat/chatGpt.ts             | 51 +++---------------
 src/pages/api/{chat => openapi}/lafGpt.ts | 48 +++--------------
 src/service/utils/openai.ts               | 66 +++++++++++++++++++++++
 3 files changed, 79 insertions(+), 86 deletions(-)
 rename src/pages/api/{chat => openapi}/lafGpt.ts (85%)

diff --git a/src/pages/api/chat/chatGpt.ts b/src/pages/api/chat/chatGpt.ts
index cea043454..00b9b917f 100644
--- a/src/pages/api/chat/chatGpt.ts
+++ b/src/pages/api/chat/chatGpt.ts
@@ -1,5 +1,4 @@
 import type { NextApiRequest, NextApiResponse } from 'next';
-import { createParser, ParsedEvent, ReconnectInterval } from 'eventsource-parser';
 import { connectToDatabase } from '@/service/mongo';
 import { getOpenAIApi, authChat } from '@/service/utils/chat';
 import { httpsAgent, openaiChatFilter } from '@/service/utils/tools';
@@ -10,6 +9,7 @@ import type { ModelSchema } from '@/types/mongoSchema';
 import { PassThrough } from 'stream';
 import { modelList } from '@/constants/model';
 import { pushChatBill } from '@/service/events/pushBill';
+import { gpt35StreamResponse } from '@/service/utils/openai';
 
 /* send the prompt */
 export default async function handler(req: NextApiRequest, res: NextApiResponse) {
@@ -102,52 +102,15 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
 
     console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);
 
-    // create the response stream
-    res.setHeader('Content-Type', 'text/event-stream;charset-utf-8');
-    res.setHeader('Access-Control-Allow-Origin', '*');
-    res.setHeader('X-Accel-Buffering', 'no');
-    res.setHeader('Cache-Control', 'no-cache, no-transform');
     step = 1;
 
-    let responseContent = '';
-    stream.pipe(res);
-
-    const onParse = async (event: ParsedEvent | ReconnectInterval) => {
-      if (event.type !== 'event') return;
-      const data = event.data;
-      if (data === '[DONE]') return;
-      try {
-        const json = JSON.parse(data);
-        const content: string = json?.choices?.[0].delta.content || '';
-        // skip empty content and a leading newline
-        if (!content || (responseContent === '' && content === '\n')) return;
-
-        responseContent += content;
-        // console.log('content:', content)
-        !stream.destroyed && stream.push(content.replace(/\n/g, '<br/>'));
-      } catch (error) {
-        error;
-      }
-    };
-
-    const decoder = new TextDecoder();
-    try {
-      for await (const chunk of chatResponse.data as any) {
-        if (stream.destroyed) {
-          // the stream was aborted, ignore the remaining content
-          break;
-        }
-        const parser = createParser(onParse);
-        parser.feed(decoder.decode(chunk));
-      }
-    } catch (error) {
-      console.log('pipe error', error);
-    }
-    // close stream
-    !stream.destroyed && stream.push(null);
-    stream.destroy();
-
+    const { responseContent } = await gpt35StreamResponse({
+      res,
+      stream,
+      chatResponse
+    });
     const promptsContent = formatPrompts.map((item) => item.content).join('');
+
     // only bill when the platform's key is used
     pushChatBill({
       isPay: !userApiKey,
diff --git a/src/pages/api/chat/lafGpt.ts b/src/pages/api/openapi/lafGpt.ts
similarity index 85%
rename from src/pages/api/chat/lafGpt.ts
rename to src/pages/api/openapi/lafGpt.ts
index 0edda1d18..162d40d4c 100644
--- a/src/pages/api/chat/lafGpt.ts
+++ b/src/pages/api/openapi/lafGpt.ts
@@ -14,6 +14,7 @@ import { connectRedis } from '@/service/redis';
 import { VecModelDataPrefix } from '@/constants/redis';
 import { vectorToBuffer } from '@/utils/tools';
 import { openaiCreateEmbedding } from '@/service/utils/openai';
+import { gpt35StreamResponse } from '@/service/utils/openai';
 
 /* send the prompt */
 export default async function handler(req: NextApiRequest, res: NextApiResponse) {
@@ -208,49 +209,12 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
 
     console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);
 
-    // create the response stream
-    res.setHeader('Content-Type', 'text/event-stream;charset-utf-8');
-    res.setHeader('Access-Control-Allow-Origin', '*');
-    res.setHeader('X-Accel-Buffering', 'no');
-    res.setHeader('Cache-Control', 'no-cache, no-transform');
     step = 1;
-
-    let responseContent = '';
-    stream.pipe(res);
-
-    const onParse = async (event: ParsedEvent | ReconnectInterval) => {
-      if (event.type !== 'event') return;
-      const data = event.data;
-      if (data === '[DONE]') return;
-      try {
-        const json = JSON.parse(data);
-        const content: string = json?.choices?.[0].delta.content || '';
-        if (!content || (responseContent === '' && content === '\n')) return;
-
-        responseContent += content;
-        // console.log('content:', content)
-        !stream.destroyed && stream.push(content.replace(/\n/g, '<br/>'));
-      } catch (error) {
-        error;
-      }
-    };
-
-    const decoder = new TextDecoder();
-    try {
-      for await (const chunk of chatResponse.data as any) {
-        if (stream.destroyed) {
-          // the stream was aborted, ignore the remaining content
-          break;
-        }
-        const parser = createParser(onParse);
-        parser.feed(decoder.decode(chunk));
-      }
-    } catch (error) {
-      console.log('pipe error', error);
-    }
-    // close stream
-    !stream.destroyed && stream.push(null);
-    stream.destroy();
+    const { responseContent } = await gpt35StreamResponse({
+      res,
+      stream,
+      chatResponse
+    });
 
     const promptsContent = formatPrompts.map((item) => item.content).join('');
     // only bill when the platform's key is used
diff --git a/src/service/utils/openai.ts b/src/service/utils/openai.ts
index 29ecccb52..c876f21c3 100644
--- a/src/service/utils/openai.ts
+++ b/src/service/utils/openai.ts
@@ -1,3 +1,6 @@
+import type { NextApiResponse } from 'next';
+import type { PassThrough } from 'stream';
+import { createParser, ParsedEvent, ReconnectInterval } from 'eventsource-parser';
 import { getOpenAIApi } from '@/service/utils/chat';
 import { httpsAgent } from './tools';
 import { User } from '../models/user';
@@ -102,3 +105,66 @@ export const openaiCreateEmbedding = async ({
     chatAPI
   };
 };
+
+/* gpt35 response */
+export const gpt35StreamResponse = ({
+  res,
+  stream,
+  chatResponse
+}: {
+  res: NextApiResponse;
+  stream: PassThrough;
+  chatResponse: any;
+}) =>
+  new Promise<{ responseContent: string }>(async (resolve, reject) => {
+    try {
+      // create the response stream
+      res.setHeader('Content-Type', 'text/event-stream;charset-utf-8');
+      res.setHeader('Access-Control-Allow-Origin', '*');
+      res.setHeader('X-Accel-Buffering', 'no');
+      res.setHeader('Cache-Control', 'no-cache, no-transform');
+
+      let responseContent = '';
+      stream.pipe(res);
+
+      const onParse = async (event: ParsedEvent | ReconnectInterval) => {
+        if (event.type !== 'event') return;
+        const data = event.data;
+        if (data === '[DONE]') return;
+        try {
+          const json = JSON.parse(data);
+          const content: string = json?.choices?.[0].delta.content || '';
+          // console.log('content:', content);
+          if (!content || (responseContent === '' && content === '\n')) return;
+
+          responseContent += content;
+          !stream.destroyed && stream.push(content.replace(/\n/g, '<br/>'));
+        } catch (error) {
+          error;
+        }
+      };
+
+      const decoder = new TextDecoder();
+      try {
+        for await (const chunk of chatResponse.data as any) {
+          if (stream.destroyed) {
+            // the stream was aborted, ignore the remaining content
+            break;
+          }
+          const parser = createParser(onParse);
+          parser.feed(decoder.decode(chunk));
+        }
+      } catch (error) {
+        console.log('pipe error', error);
+      }
+      // close stream
+      !stream.destroyed && stream.push(null);
+      stream.destroy();
+
+      resolve({
+        responseContent
+      });
+    } catch (error) {
+      reject(error);
+    }
+  });
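
Usage sketch (illustrative only, not part of the commit): a rough picture of how a route is expected to wire up the extracted gpt35StreamResponse helper. The getOpenAIApi signature, the createChatCompletion call shape (stream: true plus an axios responseType of 'stream'), the model name and the message content are assumptions taken from the surrounding handler code, not something this patch defines.

import type { NextApiRequest, NextApiResponse } from 'next';
import { PassThrough } from 'stream';
import { getOpenAIApi } from '@/service/utils/chat';
import { gpt35StreamResponse } from '@/service/utils/openai';

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  // Hypothetical client setup; the real routes resolve the key via authChat
  // and also pass the shared httpsAgent to the request options.
  const chatAPI = getOpenAIApi(process.env.OPENAI_API_KEY as string);

  // Buffer between the parsed OpenAI chunks and the HTTP response.
  const stream = new PassThrough();

  // Assumed call shape (openai v3 SDK): stream: true plus responseType: 'stream'
  // makes chatResponse.data an async iterable of SSE chunks, which is what
  // gpt35StreamResponse iterates over.
  const chatResponse = await chatAPI.createChatCompletion(
    {
      model: 'gpt-3.5-turbo',
      stream: true,
      messages: [{ role: 'user', content: 'Hello' }]
    },
    { responseType: 'stream' }
  );

  // The helper sets the SSE headers, pipes `stream` into `res`, pushes each
  // delta with newlines mapped to <br/>, and resolves with the accumulated
  // text once the upstream stream ends or the client disconnects.
  const { responseContent } = await gpt35StreamResponse({ res, stream, chatResponse });
  console.log('response length:', responseContent.length);
}

Routing the tokens through a PassThrough rather than writing to res directly is what lets the helper notice a client disconnect: it checks stream.destroyed before each push and stops consuming the upstream response early.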