fix: prompts filter

archer committed on 2023-05-30 21:27:09 +08:00
parent 0cde9a10a8
commit 176c5a4d79
4 changed files with 10 additions and 5 deletions
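
The handlers below previously saved the raw prompts array once streaming finished. chatCompletion filters the prompts it actually sends to the model, so the handlers now persist the responseMessages it returns instead. A minimal sketch of the call pattern, with assumed shapes (ChatItemType and the stand-in chatCompletion are illustrative, not taken from this diff):

    // Assumed shapes, for illustration only; the real types live in the repo.
    type ChatItemType = { obj: string; value: string };

    interface ChatCompletionResult {
      streamResponse: NodeJS.ReadableStream; // upstream streaming body
      responseMessages: ChatItemType[]; // prompts actually sent, after filtering
    }

    interface ChatCompletionArgs {
      apiKey: string;
      temperature: number;
      messages: ChatItemType[];
    }

    // Hypothetical stand-in for modelServiceToolMap[model.chat.chatModel].chatCompletion:
    declare const chatCompletion: (args: ChatCompletionArgs) => Promise<ChatCompletionResult>;

    async function demo(prompts: ChatItemType[], apiKey: string) {
      const { streamResponse, responseMessages } = await chatCompletion({
        apiKey,
        temperature: 0,
        messages: prompts
      });
      // stream to the client, then save responseMessages (not prompts)
      void streamResponse;
      return responseMessages;
    }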


@@ -129,7 +129,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
   );
   // send the chat request
-  const { streamResponse } = await modelServiceToolMap[model.chat.chatModel].chatCompletion({
+  const { streamResponse, responseMessages } = await modelServiceToolMap[
+    model.chat.chatModel
+  ].chatCompletion({
     apiKey: userOpenAiKey || systemAuthKey,
     temperature: +temperature,
     messages: prompts,
@@ -147,7 +149,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
     model: model.chat.chatModel,
     res,
     chatResponse: streamResponse,
-    prompts
+    prompts: responseMessages
   });
   // save chat


@@ -88,7 +88,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
   );
   // send the request
-  const { streamResponse } = await modelServiceToolMap[model.chat.chatModel].chatCompletion({
+  const { streamResponse, responseMessages } = await modelServiceToolMap[
+    model.chat.chatModel
+  ].chatCompletion({
     apiKey: userOpenAiKey || systemAuthKey,
     temperature: +temperature,
     messages: prompts,
@@ -106,7 +108,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
     model: model.chat.chatModel,
     res,
     chatResponse: streamResponse,
-    prompts
+    prompts: responseMessages
   });
   res.end();


@@ -140,7 +140,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse)
     model: model.chat.chatModel,
     res,
     chatResponse: streamResponse,
-    prompts
+    prompts: responseMessages
   });
   res.end();
   return {


@@ -104,6 +104,7 @@ export const openAiStreamResponse = async ({
     obj: ChatRoleEnum.AI,
     value: responseContent
   });
   const totalTokens = modelToolMap[model].countTokens({
     messages: finishMessages
   });
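
This last hunk is truncated (the header counts 6 lines before and 7 after, so its one added line is not visible above). The visible context suggests the finished conversation is the filtered prompts plus the AI reply, with the token total counted over that full list. A sketch of the inferred surrounding code (the finishMessages concat step is inferred, not fully shown by this diff):

    // Inferred context, for illustration only.
    const finishMessages = prompts.concat({
      obj: ChatRoleEnum.AI,
      value: responseContent
    });
    const totalTokens = modelToolMap[model].countTokens({
      messages: finishMessages
    });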