fix: tool call history (#4576)

Archer
2025-04-17 14:53:38 +08:00
committed by GitHub
parent 703a7cfd78
commit 5542eadb2d
4 changed files with 112 additions and 57 deletions

View File

@@ -18,7 +18,7 @@ weight: 794
 2. Support creating tools via the MCP SSE protocol.
 3. Batch-execution nodes support interactive nodes, so a human can take part in every loop iteration.
 4. Add a second-level workbench menu and merge the toolbox into it.
-5. Add system model configurations for grok3, GPT4.1, and Gemini2.5.
+5. Add system model configurations for grok3, GPT4.1, the o-series, and Gemini2.5.
 ## ⚙️ Optimizations
@@ -33,3 +33,4 @@ weight: 794
 1. When a sub-workflow contained interactive nodes, not all sub-workflow data was restored.
 2. The completion v1 endpoint did not accept the interactive parameter, causing API calls to fail.
+3. Consecutive tool calls caused abnormal context truncation.

View File

@@ -122,6 +122,58 @@
"fieldMap": {}, "fieldMap": {},
"type": "llm" "type": "llm"
}, },
{
"model": "o4-mini",
"name": "o4-mini",
"maxContext": 200000,
"maxResponse": 100000,
"quoteMaxToken": 120000,
"maxTemperature": null,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
"customCQPrompt": "",
"usedInExtractFields": true,
"usedInQueryExtension": true,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {
"max_tokens": "max_completion_tokens"
},
"type": "llm",
"showTopP": true,
"showStopSign": false
},
{
"model": "o3",
"name": "o3",
"maxContext": 200000,
"maxResponse": 100000,
"quoteMaxToken": 120000,
"maxTemperature": null,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
"customCQPrompt": "",
"usedInExtractFields": true,
"usedInQueryExtension": true,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {
"max_tokens": "max_completion_tokens"
},
"type": "llm",
"showTopP": true,
"showStopSign": false
},
{ {
"model": "o3-mini", "model": "o3-mini",
"name": "o3-mini", "name": "o3-mini",
@@ -140,37 +192,7 @@
"usedInQueryExtension": true, "usedInQueryExtension": true,
"customExtractPrompt": "", "customExtractPrompt": "",
"usedInToolCall": true, "usedInToolCall": true,
"defaultConfig": { "defaultConfig": {},
"stream": false
},
"fieldMap": {
"max_tokens": "max_completion_tokens"
},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "o1-mini",
"name": "o1-mini",
"maxContext": 128000,
"maxResponse": 4000,
"quoteMaxToken": 120000,
"maxTemperature": null,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
"customCQPrompt": "",
"usedInExtractFields": true,
"usedInQueryExtension": true,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {
"stream": false
},
"fieldMap": { "fieldMap": {
"max_tokens": "max_completion_tokens" "max_tokens": "max_completion_tokens"
}, },
@@ -196,9 +218,33 @@
"usedInQueryExtension": true, "usedInQueryExtension": true,
"customExtractPrompt": "", "customExtractPrompt": "",
"usedInToolCall": true, "usedInToolCall": true,
"defaultConfig": { "defaultConfig": {},
"stream": false "fieldMap": {
"max_tokens": "max_completion_tokens"
}, },
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "o1-mini",
"name": "o1-mini",
"maxContext": 128000,
"maxResponse": 4000,
"quoteMaxToken": 120000,
"maxTemperature": null,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
"customCQPrompt": "",
"usedInExtractFields": true,
"usedInQueryExtension": true,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {},
"fieldMap": { "fieldMap": {
"max_tokens": "max_completion_tokens" "max_tokens": "max_completion_tokens"
}, },
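
The new o-series entries drop the `"stream": false` default and keep only the `fieldMap` that renames `max_tokens` to `max_completion_tokens`. A minimal sketch of how such a key-renaming map could be applied to a request body before the API call; the `applyFieldMap` helper and the request shape are illustrative assumptions, not the project's actual implementation:

// Illustrative only: shows how a fieldMap like { "max_tokens": "max_completion_tokens" }
// could rewrite request-body keys before the model request is sent.
type FieldMap = Record<string, string>;

const applyFieldMap = (
  body: Record<string, unknown>,
  fieldMap: FieldMap = {}
): Record<string, unknown> =>
  Object.fromEntries(Object.entries(body).map(([key, value]) => [fieldMap[key] ?? key, value]));

// Example: an o-series model expects max_completion_tokens instead of max_tokens
const requestBody = applyFieldMap(
  { model: 'o4-mini', messages: [], max_tokens: 4096 },
  { max_tokens: 'max_completion_tokens' }
);
// => { model: 'o4-mini', messages: [], max_completion_tokens: 4096 }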

View File

@@ -36,36 +36,44 @@ export const filterGPTMessageByMaxContext = async ({
   const systemPrompts: ChatCompletionMessageParam[] = messages.slice(0, chatStartIndex);
   const chatPrompts: ChatCompletionMessageParam[] = messages.slice(chatStartIndex);

+  if (chatPrompts.length === 0) {
+    return systemPrompts;
+  }
+
   // reduce token of systemPrompt
   maxContext -= await countGptMessagesTokens(systemPrompts);

-  // Save the last chat prompt(question)
-  const question = chatPrompts.pop();
-  if (!question) {
-    return systemPrompts;
-  }
-  const chats: ChatCompletionMessageParam[] = [question];
+  /* Keep each conversation round intact when truncating, e.g.
+    1. user - assistant - user
+    2. user - assistant - tool
+    3. user - assistant - tool - tool - tool
+    4. user - assistant - tool - assistant - tool
+    5. user - assistant - assistant - tool - tool
+  */
+  let chats: ChatCompletionMessageParam[] = [];
+  let tmpChats: ChatCompletionMessageParam[] = [];

-  // Walk the conversation from the end, taking one assistant + user pair per step
-  while (1) {
-    const assistant = chatPrompts.pop();
-    const user = chatPrompts.pop();
-    if (!assistant || !user) {
+  // Walk the conversation from the end; each user message marks a complete round
+  while (chatPrompts.length > 0) {
+    const lastMessage = chatPrompts.pop();
+    if (!lastMessage) {
       break;
     }

-    const tokens = await countGptMessagesTokens([assistant, user]);
-    maxContext -= tokens;
-    /* Total tokens exceed the budget: truncate here */
-    if (maxContext < 0) {
-      break;
-    }
+    // A user message means a full round has been collected; decide whether to keep it
+    if (lastMessage.role === ChatCompletionRequestMessageRoleEnum.User) {
+      const tokens = await countGptMessagesTokens([lastMessage, ...tmpChats]);
+      maxContext -= tokens;
+      // The whole round exceeds the remaining token budget, so drop it
+      if (maxContext < 0) {
+        break;
+      }

-    chats.unshift(assistant);
-    chats.unshift(user);
-
-    if (chatPrompts.length === 0) {
-      break;
+      chats = [lastMessage, ...tmpChats].concat(chats);
+      tmpChats = [];
+    } else {
+      tmpChats.unshift(lastMessage);
     }
   }
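
For reference, a simplified, self-contained sketch of the round-based grouping the new loop implements: messages are collected backwards until a user message closes the round, and each round is then kept or dropped as a whole. The `Msg` type, `filterByRounds`, and the character-length token stand-in are illustrative, not the project's actual helpers:

// Simplified sketch: group messages into rounds that start at a user message,
// and keep or drop whole rounds so tool-call replies are never orphaned.
type Msg = { role: 'user' | 'assistant' | 'tool'; content: string };

const filterByRounds = (
  messages: Msg[],
  maxTokens: number,
  countTokens: (msgs: Msg[]) => number
): Msg[] => {
  const remaining = [...messages];
  let kept: Msg[] = [];
  let round: Msg[] = [];

  while (remaining.length > 0) {
    const last = remaining.pop()!;
    if (last.role === 'user') {
      // A user message closes a round; keep or drop the whole round at once
      maxTokens -= countTokens([last, ...round]);
      if (maxTokens < 0) break;
      kept = [last, ...round, ...kept];
      round = [];
    } else {
      round.unshift(last);
    }
  }
  return kept;
};

// Usage: a rough counter based on content length keeps the example self-contained
const roughCount = (msgs: Msg[]) => msgs.reduce((sum, m) => sum + m.content.length, 0);
const history: Msg[] = [
  { role: 'user', content: 'first question' },
  { role: 'assistant', content: 'calls a tool' },
  { role: 'tool', content: 'tool result' },
  { role: 'user', content: 'follow-up question' },
  { role: 'assistant', content: 'final answer' }
];
// With a small budget, only the most recent complete round survives
console.log(filterByRounds(history, 40, roughCount));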

View File

@@ -285,7 +285,7 @@ export const runToolWithToolChoice = async (
     },
     toolModel
   );
-  // console.log(JSON.stringify(requestMessages, null, 2), '==requestBody');
+  // console.log(JSON.stringify(filterMessages, null, 2), '==requestMessages');
   /* Run llm */
   const {
     response: aiResponse,