perf: optimize the system prompt message

Archer
2023-03-17 00:53:27 +08:00
parent 1e770088d0
commit d85b4c0945
6 changed files with 26 additions and 28 deletions

View File

@@ -35,9 +35,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
   // filter by context length
   const maxContext = model.security.contextMaxLen;
   const filterPrompts =
-    prompts.length > maxContext + 2
-      ? [prompts[0], ...prompts.slice(prompts.length - maxContext)]
-      : prompts.slice(0, prompts.length);
+    prompts.length > maxContext ? prompts.slice(prompts.length - maxContext) : prompts;

   // format the text content
   const map = {
@@ -51,11 +49,13 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
       content: item.value
     })
   );
-  // first sentence: emphasize the code type
+  // if the model has a system prompt, insert it automatically
+  if (model.systemPrompt) {
     formatPrompts.unshift({
-      role: ChatCompletionRequestMessageRoleEnum.System,
-      content: '如果你想返回代码,请务必声明代码的类型!并且在代码块前加一个换行符。'
+      role: 'system',
+      content: model.systemPrompt
     });
+  }

   // get the chatAPI
   const chatAPI = getOpenAIApi(userApiKey);
@@ -77,8 +77,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
   );
   console.log(
     'response success',
-    `${(Date.now() - startTime) / 1000}s`,
-    formatPrompts.reduce((sum, item) => sum + item.content.length, 0)
+    `time: ${(Date.now() - startTime) / 1000}s`,
+    `promptLen: ${formatPrompts.length}`,
+    `contentLen: ${formatPrompts.reduce((sum, item) => sum + item.content.length, 0)}`
   );

   // create the response stream
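A minimal TypeScript sketch of the new flow in this handler: trim to the latest maxContext prompts, then inject the model's configured system prompt at request time. ChatItem, prepareMessages and the 'user' role stand-in are illustrative assumptions for this example, not the project's actual code.

// Illustrative sketch only; "ChatItem" and "prepareMessages" are assumed names.
type ChatItem = { obj: string; value: string };

function prepareMessages(prompts: ChatItem[], maxContext: number, systemPrompt?: string) {
  // new rule: keep only the latest maxContext prompts (no special handling of prompts[0] any more)
  const filterPrompts =
    prompts.length > maxContext ? prompts.slice(prompts.length - maxContext) : prompts;

  // the real handler maps item.obj through a role map; 'user' stands in here
  const formatPrompts = filterPrompts.map((item) => ({
    role: 'user',
    content: item.value
  }));

  // the system prompt is now injected per request instead of being stored in chat history
  if (systemPrompt) {
    formatPrompts.unshift({ role: 'system', content: systemPrompt });
  }
  return formatPrompts;
}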

View File

@@ -49,21 +49,12 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
     error;
   }

-  const defaultContent = model.systemPrompt
-    ? [
-        {
-          obj: 'SYSTEM',
-          value: model.systemPrompt
-        }
-      ]
-    : [];

   if (!history) {
     // no record yet, create one
     const response = await ChatWindow.create({
       chatId,
       updateTime: Date.now(),
-      content: defaultContent
+      content: []
     });
     responseId = response._id;
   }
@@ -78,7 +69,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
         secret: model.security,
         chatModel: model.service.chatModel
       },
-      history: history ? history.content : defaultContent
+      history: history ? history.content : []
     }
   });
 } catch (err) {

View File

@@ -249,9 +249,9 @@ const Chat = ({
     /* limit the context length */
     const maxContext = chatSiteData.secret.contextMaxLen;
     const requestPrompt =
-      newChatList.length > maxContext + 2
-        ? [newChatList[0], ...newChatList.slice(newChatList.length - maxContext - 1, -1)]
-        : newChatList.slice(0, newChatList.length - 1);
+      newChatList.length > maxContext + 1
+        ? newChatList.slice(newChatList.length - maxContext - 1, -1)
+        : newChatList.slice(0, -1);

     if (typeof fnMap[chatSiteData.chatModel] === 'function') {
       await fnMap[chatSiteData.chatModel](requestPrompt);
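A small sketch of the client-side counterpart above. It assumes newChatList ends with the placeholder entry for the pending reply (which is why the last element is always dropped); pickRequestPrompt is an illustrative name, not a project helper.

// Illustrative only: mirrors the new requestPrompt slicing.
function pickRequestPrompt<T>(newChatList: T[], maxContext: number): T[] {
  // drop the trailing placeholder, then keep at most maxContext preceding messages
  return newChatList.length > maxContext + 1
    ? newChatList.slice(newChatList.length - maxContext - 1, -1)
    : newChatList.slice(0, -1);
}

For example, with maxContext = 2 and five messages, only the 3rd and 4th messages are sent.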

View File

@@ -99,7 +99,9 @@ const ModelEditForm = ({ model }: { model?: ModelType }) => {
           rows={4}
           maxLength={500}
           {...register('systemPrompt')}
-          placeholder={'系统的提示词,会在进入聊天时放置在第一句,用于限定模型的聊天范围'}
+          placeholder={
+            '模型默认的 prompt 词,可以通过调整该内容,生成一个限定范围的模型,更方便的去使用。'
+          }
         />
       </FormControl>
     </Card>

View File

@@ -2,7 +2,8 @@ export const openaiError: Record<string, string> = {
   context_length_exceeded: '内容超长了,请重置对话',
   Unauthorized: 'API-KEY 不合法',
   rate_limit_reached: '同时访问用户过多,请稍后再试',
-  'Bad Request': '上下文太多了,请重开对话~'
+  'Bad Request': '上下文太多了,请重开对话~',
+  'Too Many Requests': '请求次数太多了,请慢点~'
 };

 export const proxyError: Record<string, boolean> = {
   ECONNABORTED: true,
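A hedged sketch of how a map like this is typically consulted, mirroring the jsonRes lookup in the next file; friendlyMessage and its fallback argument are examples only, not project code.

// Illustrative only: map an OpenAI response statusText to a user-facing message.
const openaiError: Record<string, string> = {
  'Bad Request': '上下文太多了,请重开对话~',
  'Too Many Requests': '请求次数太多了,请慢点~'
};

function friendlyMessage(statusText?: string, fallback = 'unexpected error'): string {
  // unknown statusText values fall through to the caller-supplied fallback
  return (statusText && openaiError[statusText]) || fallback;
}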

View File

@@ -28,8 +28,11 @@ export const jsonRes = (
   } else if (openaiError[error?.response?.statusText]) {
     msg = openaiError[error.response.statusText];
   }

-  // console.log(error?.response)
-  console.log('error->', error.code, error?.response?.statusText, msg);
+  // console.log(error?.response);
+  console.log('error->');
+  console.log('code:', error.code);
+  console.log('statusText:', error?.response?.statusText);
+  console.log('msg:', msg);
 }

 res.json({