feat: refactor agent dispatch functions and improve tool call handling (#5606)

francis
2025-09-08 20:08:29 +08:00
committed by archer
parent 7bd725d2c8
commit 7427d9b76f
4 changed files with 131 additions and 196 deletions
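
All four files converge on one sub-app dispatch contract: each sub-agent receives the shared message context, a unified onStream callback, and typed params, and returns a response string plus optional usages. Below is a minimal sketch of that contract as it can be inferred from the call sites in this diff; the authoritative declarations live in the shared '../../type' module, which is not part of this commit, so field shapes here are assumptions.

import type { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type.d';
import type { ChatNodeUsageType } from '@fastgpt/global/support/wallet/bill/type';

// Inferred shapes only; names are taken from the call sites in this diff.
type DispatchSubAppProps<T> = {
  messages: ChatCompletionMessageParam[]; // shared conversation context
  onStream?: (e: { text: string }) => void; // unified streaming hook, replacing onStreaming/onReasoning
  params: T; // per-agent config, e.g. { model, instruction } for the plan agent
};

type DispatchSubAppResponse = {
  response: string; // replaces the old `content` field
  usages?: ChatNodeUsageType[]; // replaces the old inputTokens/outputTokens pair
};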

View File

@@ -51,8 +51,8 @@ import {
   valueTypeFormat
 } from '@fastgpt/global/core/workflow/runtime/utils';
 import { sliceStrStartEnd } from '@fastgpt/global/common/string/tools';
-import { transferPlanAgent } from './sub/plan';
-import { transferModelAgent } from './sub/model';
+import { dispatchPlanAgent } from './sub/plan';
+import { dispatchModelAgent } from './sub/model';
 import { PlanAgentTool } from './sub/plan/constants';
 import { ModelAgentTool } from './sub/model/constants';
 import { getSubIdsByAgentSystem, parseAgentSystem } from './utils';
 import { dispatchRunAppNode } from '../../child/runApp';
 import { dispatchRunPlugin } from '../../plugin/run';
 import type { ChatNodeUsageType } from '@fastgpt/global/support/wallet/bill/type';
 import { dispatchTool } from './sub/tool';
+import { dispatchStopToolCall } from '../agent/sub/stop';

 export type DispatchAgentModuleProps = ModuleDispatchProps<{
   [NodeInputKeyEnum.history]?: ChatItemType[];
@@ -273,11 +274,89 @@ export const dispatchRunAgent = async (props: DispatchAgentModuleProps): Promise
     isEnd
   } = await (async () => {
     if (toolId === SubAppIds.stop) {
+      const { response, usages } = await dispatchStopToolCall();
       return {
-        response: '',
-        usages: [],
+        response,
+        usages,
         isEnd: true
       };
+    } else if (toolId === SubAppIds.plan) {
+      const { instruction } = parseToolArgs<{ instruction: string }>(
+        call.function.arguments
+      );
+      const { response, usages } = await dispatchPlanAgent({
+        messages,
+        params: {
+          model,
+          instruction
+        },
+        onStream({ text }) {
+          // TODO: a dedicated plan SSE event is needed
+          workflowStreamResponse?.({
+            event: SseResponseEventEnum.toolResponse,
+            data: {
+              tool: {
+                id: call.id,
+                toolName: '',
+                toolAvatar: '',
+                params: '',
+                response: sliceStrStartEnd(text, 5000, 5000)
+              }
+            }
+          });
+        }
+      });
+      const lastPlanCallIndex = messages
+        .slice(0, -1)
+        .findLastIndex(
+          (c) =>
+            c.role === 'assistant' &&
+            c.tool_calls?.some((tc) => tc.function?.name === SubAppIds.plan)
+        );
+      const originalContent =
+        lastPlanCallIndex !== -1 ? (messages[lastPlanCallIndex + 1].content as string) : '';
+      // const applyedContent = applyDiff({
+      //   original: originalContent,
+      //   patch: content
+      // });
+      return {
+        response,
+        usages,
+        isEnd: false
+      };
+    } else if (toolId === SubAppIds.model) {
+      const { systemPrompt, task } = parseToolArgs<{ systemPrompt: string; task: string }>(
+        call.function.arguments
+      );
+      const { response, usages } = await dispatchModelAgent({
+        messages,
+        params: { model, systemPrompt, task },
+        onStream({ text }) {
+          workflowStreamResponse?.({
+            event: SseResponseEventEnum.toolResponse,
+            data: {
+              tool: {
+                id: call.id,
+                toolName: '',
+                toolAvatar: '',
+                params: '',
+                response: sliceStrStartEnd(text, 5000, 5000)
+              }
+            }
+          });
+        }
+      });
+      return {
+        response,
+        usages,
+        isEnd: false
+      };
+    }
     }
     // User Sub App
     else {
@@ -353,98 +432,6 @@ export const dispatchRunAgent = async (props: DispatchAgentModuleProps): Promise
       }
     }
   })();

-  // } else if (toolId === SubAppIds.plan) {
-  //   const planModel = planConfig?.model ?? model;
-  //   const { instruction } = parseToolArgs<{ instruction: string }>(call.function.arguments);
-  //   const { content, inputTokens, outputTokens } = await transferPlanAgent({
-  //     model: planModel,
-  //     instruction,
-  //     histories: GPTMessages2Chats({
-  //       messages: context.slice(1, -1),
-  //       getToolInfo
-  //     }),
-  //     onStreaming({ text }) {
-  //       // TODO: a dedicated plan SSE event is needed
-  //       workflowStreamResponse?.({
-  //         event: SseResponseEventEnum.toolResponse,
-  //         data: {
-  //           tool: {
-  //             id: call.id,
-  //             toolName: '',
-  //             toolAvatar: '',
-  //             params: '',
-  //             response: sliceStrStartEnd(fullText, 5000, 5000)
-  //           }
-  //         }
-  //       });
-  //     }
-  //   });
-  //   const lastPlanCallIndex = context
-  //     .slice(0, -1)
-  //     .findLastIndex(
-  //       (c) =>
-  //         c.role === 'assistant' &&
-  //         c.tool_calls?.some((tc) => tc.function?.name === SubAppIds.plan)
-  //     );
-  //   const originalContent =
-  //     lastPlanCallIndex !== -1 ? (context[lastPlanCallIndex + 1].content as string) : '';
-  //   const applyedContent = applyDiff({
-  //     original: originalContent,
-  //     patch: content
-  //   });
-  //   // workflowStreamResponse?.({
-  //   //   event: SseResponseEventEnum.toolResponse,
-  //   //   data: {
-  //   //     tool: {
-  //   //       id: call.id,
-  //   //       toolName: '',
-  //   //       toolAvatar: '',
-  //   //       params: '',
-  //   //       response: sliceStrStartEnd(applyedContent, 5000, 5000)
-  //   //     }
-  //   //   }
-  //   // });
-  //   return {
-  //     response: content,
-  //     usages: [],
-  //     isEnd: false
-  //   };
-  // } else if (toolId === SubAppIds.model) {
-  //   const { systemPrompt, task } = parseToolArgs<{ systemPrompt: string; task: string }>(
-  //     call.function.arguments
-  //   );
-  //   const { content, inputTokens, outputTokens } = await transferModelAgent({
-  //     model,
-  //     systemPrompt,
-  //     task,
-  //     onStreaming({ text, fullText }) {
-  //       if (!fullText) return;
-  //       workflowStreamResponse?.({
-  //         event: SseResponseEventEnum.toolResponse,
-  //         data: {
-  //           tool: {
-  //             id: call.id,
-  //             toolName: '',
-  //             toolAvatar: '',
-  //             params: '',
-  //             response: sliceStrStartEnd(fullText, 5000, 5000)
-  //           }
-  //         }
-  //       });
-  //     }
-  //   });
-  //   return {
-  //     response: content,
-  //     usages: [],
-  //     isEnd: false
-  //   };
-  // }

   // Push stream response
   workflowStreamResponse?.({

View File

@@ -5,55 +5,37 @@ import type { ChatItemType } from '@fastgpt/global/core/chat/type';
-import { chats2GPTMessages, getSystemPrompt_ChatItemType } from '@fastgpt/global/core/chat/adapt';
-import { ChatItemValueTypeEnum, ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
 import { getErrText } from '@fastgpt/global/common/error/utils';
+import type { DispatchSubAppProps, DispatchSubAppResponse } from '../../type';

 type ModelAgentConfig = {
   model: string;
   temperature?: number;
   top_p?: number;
   stream?: boolean;
-};
-type transferModelAgentProps = {
   systemPrompt?: string;
   task?: string;
-} & ModelAgentConfig &
-  Pick<ResponseEvents, 'onStreaming' | 'onReasoning'>;
+};
+type dispatchModelAgentProps = DispatchSubAppProps<ModelAgentConfig>;

-export async function transferModelAgent({
-  systemPrompt = '',
-  task = '',
-  onStreaming,
-  onReasoning,
-  model,
-  temperature = 0.7,
-  top_p,
-  stream = true
-}: transferModelAgentProps): Promise<{
-  content: string;
-  inputTokens: number;
-  outputTokens: number;
-}> {
+export async function dispatchModelAgent({
+  messages,
+  onStream,
+  params
+}: dispatchModelAgentProps): Promise<DispatchSubAppResponse> {
+  const { model, temperature, top_p, stream, systemPrompt, task } = params;
   try {
-    const messages: ChatItemType[] = [
-      ...getSystemPrompt_ChatItemType(systemPrompt),
+    const context: ChatCompletionMessageParam[] = [
       {
-        obj: ChatRoleEnum.Human,
-        value: [
-          {
-            type: ChatItemValueTypeEnum.text,
-            text: {
-              content: task
-            }
-          }
-        ]
+        role: 'system',
+        content: systemPrompt ?? ''
+      },
+      {
+        role: 'user',
+        content: task ?? ''
       }
     ];
-    const adaptedMessages: ChatCompletionMessageParam[] = chats2GPTMessages({
-      messages,
-      reserveId: false
-    });
     const {
       answerText,
@@ -62,26 +44,23 @@ export async function transferModelAgent({
       body: {
         model,
         temperature,
-        messages: adaptedMessages,
+        messages: context,
         top_p,
         stream
       },
-      onStreaming,
-      onReasoning
+      onStreaming: onStream
     });
     return {
-      content: answerText,
-      inputTokens,
-      outputTokens
+      response: answerText,
+      usages: undefined
     };
   } catch (error) {
     const err = getErrText(error);
     addLog.warn('call model_agent failed');
     return {
-      content: err,
-      inputTokens: 0,
-      outputTokens: 0
+      response: err,
+      usages: undefined
     };
   }
 }
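
For illustration, a hypothetical call site under the new dispatchModelAgent signature; the model id and prompts below are placeholders, and the real caller is the agent index shown above.

// Hypothetical usage sketch; mirrors the call shape used in dispatchRunAgent.
const { response, usages } = await dispatchModelAgent({
  messages: [], // currently unused by the model agent itself, but part of the shared props
  params: {
    model: 'gpt-4o-mini', // placeholder model id
    systemPrompt: 'You are a concise assistant.',
    task: 'Summarize the previous discussion.'
  },
  onStream: ({ text }) => process.stdout.write(text)
});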

View File

@@ -1,12 +1,10 @@
 import type { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type.d';
-import { addLog } from '../../../../../../common/system/log';
-import { createLLMResponse, type ResponseEvents } from '../../../../../ai/llm/request';
+import { addLog } from '../../../../../../../common/system/log';
+import { createLLMResponse, type ResponseEvents } from '../../../../../../ai/llm/request';
 import { defaultPlanAgentPrompt } from './prompt';
 import { replaceVariable } from '@fastgpt/global/common/string/tools';
-import { chats2GPTMessages, getSystemPrompt_ChatItemType } from '@fastgpt/global/core/chat/adapt';
-import type { ChatItemType } from '@fastgpt/global/core/chat/type';
-import { ChatItemValueTypeEnum, ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
 import { getErrText } from '@fastgpt/global/common/error/utils';
+import type { DispatchSubAppProps, DispatchSubAppResponse } from '../../type';

 type PlanAgentConfig = {
   model: string;
@@ -14,55 +12,29 @@ type PlanAgentConfig = {
   temperature?: number;
   top_p?: number;
   stream?: boolean;
+  instruction?: string;
 };
-type transferPlanAgentProps = {
-  histories: ChatItemType[];
-  instruction?: string;
-} & PlanAgentConfig &
-  Pick<ResponseEvents, 'onStreaming' | 'onReasoning'>;
+type dispatchPlanAgentProps = DispatchSubAppProps<PlanAgentConfig>;

-export async function transferPlanAgent({
-  instruction = '',
-  histories,
-  onStreaming,
-  onReasoning,
-  model,
-  customSystemPrompt,
-  temperature = 0,
-  top_p,
-  stream = true
-}: transferPlanAgentProps): Promise<{
-  content: string;
-  inputTokens: number;
-  outputTokens: number;
-}> {
+export const dispatchPlanAgent = async ({
+  messages,
+  onStream,
+  params
+}: dispatchPlanAgentProps): Promise<DispatchSubAppResponse> => {
+  const { model, customSystemPrompt, temperature, top_p, stream, instruction } = params;
   try {
-    const messages: ChatItemType[] = [
-      ...getSystemPrompt_ChatItemType(
-        replaceVariable(defaultPlanAgentPrompt, {
+    const combinedMessages: ChatCompletionMessageParam[] = [
+      {
+        role: 'system',
+        content: replaceVariable(defaultPlanAgentPrompt, {
          userRole: customSystemPrompt
        })
-      ),
-      ...histories,
-      {
-        obj: ChatRoleEnum.Human,
-        value: [
-          {
-            type: ChatItemValueTypeEnum.text,
-            text: {
-              content: instruction
-            }
-          }
-        ]
-      }
+      },
+      ...messages,
+      { role: 'user', content: instruction ?? '' }
     ];
-    const adaptedMessages: ChatCompletionMessageParam[] = chats2GPTMessages({
-      messages,
-      reserveId: false
-    });
     const {
       answerText,
@@ -71,26 +43,23 @@ export async function transferPlanAgent({
       body: {
         model,
         temperature,
-        messages: adaptedMessages,
+        messages: combinedMessages,
         top_p,
         stream
       },
-      onStreaming,
-      onReasoning
+      onStreaming: onStream
     });
     return {
-      content: answerText,
-      inputTokens,
-      outputTokens
+      response: answerText,
+      usages: undefined
     };
   } catch (error) {
     const err = getErrText(error);
     addLog.warn('call plan_agent failed');
     return {
-      content: err,
-      inputTokens: 0,
-      outputTokens: 0
+      response: err,
+      usages: undefined
     };
   }
-}
+};

View File

@@ -1,7 +1,7 @@
 import type { DispatchSubAppProps, DispatchSubAppResponse } from '../../type';

 export const dispatchStopToolCall = async (
-  props: DispatchSubAppProps<{}>
+  props?: DispatchSubAppProps<{}>
 ): Promise<DispatchSubAppResponse> => {
   return {
     response: ''