From d85b4c09457e8954738623f30838d60bf68e8f4c Mon Sep 17 00:00:00 2001
From: Archer <545436317@qq.com>
Date: Fri, 17 Mar 2023 00:53:27 +0800
Subject: [PATCH] =?UTF-8?q?perf:=20=E4=BC=98=E5=8C=96=E7=B3=BB=E7=BB=9F?=
 =?UTF-8?q?=E6=8F=90=E7=A4=BA=E6=B6=88=E6=81=AF?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/pages/api/chat/chatGpt.ts                | 21 +++++++++++----------
 src/pages/api/chat/init.ts                   | 13 ++-----------
 src/pages/chat/index.tsx                     |  6 +++---
 src/pages/model/components/ModelEditForm.tsx |  4 +++-
 src/service/errorCode.ts                     |  3 ++-
 src/service/response.ts                      |  7 +++++--
 6 files changed, 26 insertions(+), 28 deletions(-)

diff --git a/src/pages/api/chat/chatGpt.ts b/src/pages/api/chat/chatGpt.ts
index 662c6c481..c7e925b7a 100644
--- a/src/pages/api/chat/chatGpt.ts
+++ b/src/pages/api/chat/chatGpt.ts
@@ -35,9 +35,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
     // 上下文长度过滤
     const maxContext = model.security.contextMaxLen;
     const filterPrompts =
-      prompts.length > maxContext + 2
-        ? [prompts[0], ...prompts.slice(prompts.length - maxContext)]
-        : prompts.slice(0, prompts.length);
+      prompts.length > maxContext ? prompts.slice(prompts.length - maxContext) : prompts;
 
     // 格式化文本内容
     const map = {
@@ -51,11 +49,13 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
         content: item.value
       })
     );
-    // 第一句话,强调代码类型
-    formatPrompts.unshift({
-      role: ChatCompletionRequestMessageRoleEnum.System,
-      content: '如果你想返回代码,请务必声明代码的类型!并且在代码块前加一个换行符。'
-    });
+    // 如果有系统提示词,自动插入
+    if (model.systemPrompt) {
+      formatPrompts.unshift({
+        role: 'system',
+        content: model.systemPrompt
+      });
+    }
 
     // 获取 chatAPI
     const chatAPI = getOpenAIApi(userApiKey);
@@ -77,8 +77,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
     );
     console.log(
       'response success',
-      `${(Date.now() - startTime) / 1000}s`,
-      formatPrompts.reduce((sum, item) => sum + item.content.length, 0)
+      `time: ${(Date.now() - startTime) / 1000}s`,
+      `promptLen: ${formatPrompts.length}`,
+      `contentLen: ${formatPrompts.reduce((sum, item) => sum + item.content.length, 0)}`
     );
 
     // 创建响应流
diff --git a/src/pages/api/chat/init.ts b/src/pages/api/chat/init.ts
index c32e57fd8..9f88c19ae 100644
--- a/src/pages/api/chat/init.ts
+++ b/src/pages/api/chat/init.ts
@@ -49,21 +49,12 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
       error;
     }
 
-    const defaultContent = model.systemPrompt
-      ? [
-          {
-            obj: 'SYSTEM',
-            value: model.systemPrompt
-          }
-        ]
-      : [];
-
     if (!history) {
       // 没有记录,创建一个
       const response = await ChatWindow.create({
         chatId,
         updateTime: Date.now(),
-        content: defaultContent
+        content: []
       });
       responseId = response._id;
     }
@@ -78,7 +69,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
         secret: model.security,
         chatModel: model.service.chatModel
       },
-      history: history ? history.content : defaultContent
+      history: history ? history.content : []
     }
   });
 } catch (err) {
diff --git a/src/pages/chat/index.tsx b/src/pages/chat/index.tsx
index f366d6fc5..87ed7fd5a 100644
--- a/src/pages/chat/index.tsx
+++ b/src/pages/chat/index.tsx
@@ -249,9 +249,9 @@ const Chat = ({
     /* 对长度进行限制 */
     const maxContext = chatSiteData.secret.contextMaxLen;
     const requestPrompt =
-      newChatList.length > maxContext + 2
-        ? [newChatList[0], ...newChatList.slice(newChatList.length - maxContext - 1, -1)]
-        : newChatList.slice(0, newChatList.length - 1);
+      newChatList.length > maxContext + 1
+        ? newChatList.slice(newChatList.length - maxContext - 1, -1)
+        : newChatList.slice(0, -1);
 
     if (typeof fnMap[chatSiteData.chatModel] === 'function') {
       await fnMap[chatSiteData.chatModel](requestPrompt);
diff --git a/src/pages/model/components/ModelEditForm.tsx b/src/pages/model/components/ModelEditForm.tsx
index 65138bbc2..8b1f2b9b3 100644
--- a/src/pages/model/components/ModelEditForm.tsx
+++ b/src/pages/model/components/ModelEditForm.tsx
@@ -99,7 +99,9 @@ const ModelEditForm = ({ model }: { model?: ModelType }) => {
             rows={4}
             maxLength={500}
             {...register('systemPrompt')}
-            placeholder={'系统的提示词,会在进入聊天时放置在第一句,用于限定模型的聊天范围'}
+            placeholder={
+              '模型默认的 prompt 词,可以通过调整该内容,生成一个限定范围的模型,更方便的去使用。'
+            }
           />
diff --git a/src/service/errorCode.ts b/src/service/errorCode.ts
index 607a1116a..ae9f4280d 100644
--- a/src/service/errorCode.ts
+++ b/src/service/errorCode.ts
@@ -2,7 +2,8 @@ export const openaiError: Record<string, string> = {
   context_length_exceeded: '内容超长了,请重置对话',
   Unauthorized: 'API-KEY 不合法',
   rate_limit_reached: '同时访问用户过多,请稍后再试',
-  'Bad Request': '上下文太多了,请重开对话~'
+  'Bad Request': '上下文太多了,请重开对话~',
+  'Too Many Requests': '请求次数太多了,请慢点~'
 };
 export const proxyError: Record<string, boolean> = {
   ECONNABORTED: true,
diff --git a/src/service/response.ts b/src/service/response.ts
index 3bbf2909b..5fe929f67 100644
--- a/src/service/response.ts
+++ b/src/service/response.ts
@@ -28,8 +28,11 @@ export const jsonRes = (
     } else if (openaiError[error?.response?.statusText]) {
       msg = openaiError[error.response.statusText];
     }
-    // console.log(error?.response)
-    console.log('error->', error.code, error?.response?.statusText, msg);
+    // console.log(error?.response);
+    console.log('error->');
+    console.log('code:', error.code);
+    console.log('statusText:', error?.response?.statusText);
+    console.log('msg:', msg);
   }
 
   res.json({