perf: workflow response field (#5443)

This commit is contained in:
Archer
2025-08-13 14:29:13 +08:00
committed by GitHub
parent 83aa3a855f
commit ad550f4444
13 changed files with 50 additions and 37 deletions

View File

@@ -99,6 +99,7 @@ description: FastGPT 文档目录
- [/docs/upgrading/4-11/4110](/docs/upgrading/4-11/4110) - [/docs/upgrading/4-11/4110](/docs/upgrading/4-11/4110)
- [/docs/upgrading/4-11/4111](/docs/upgrading/4-11/4111) - [/docs/upgrading/4-11/4111](/docs/upgrading/4-11/4111)
- [/docs/upgrading/4-12/4120](/docs/upgrading/4-12/4120) - [/docs/upgrading/4-12/4120](/docs/upgrading/4-12/4120)
- [/docs/upgrading/4-12/4121](/docs/upgrading/4-12/4121)
- [/docs/upgrading/4-8/40](/docs/upgrading/4-8/40) - [/docs/upgrading/4-8/40](/docs/upgrading/4-8/40)
- [/docs/upgrading/4-8/41](/docs/upgrading/4-8/41) - [/docs/upgrading/4-8/41](/docs/upgrading/4-8/41)
- [/docs/upgrading/4-8/42](/docs/upgrading/4-8/42) - [/docs/upgrading/4-8/42](/docs/upgrading/4-8/42)

View File

@@ -0,0 +1,17 @@
---
title: 'V4.12.1(进行中)'
description: 'FastGPT V4.12.1 更新说明'
---
## 🚀 新增内容
## ⚙️ 优化
1. 工作流响应优化:主动指定响应值进入历史记录,而不是根据 key 决定。
## 🐛 修复
## 🔨 工具更新

View File

@@ -1,5 +1,5 @@
{ {
"title": "4.12.x", "title": "4.12.x",
"description": "", "description": "",
"pages": ["4120"] "pages": ["4121", "4120"]
} }

View File

@@ -102,7 +102,7 @@
"document/content/docs/upgrading/4-10/4101.mdx": "2025-08-02T19:38:37+08:00", "document/content/docs/upgrading/4-10/4101.mdx": "2025-08-02T19:38:37+08:00",
"document/content/docs/upgrading/4-11/4110.mdx": "2025-08-05T23:20:39+08:00", "document/content/docs/upgrading/4-11/4110.mdx": "2025-08-05T23:20:39+08:00",
"document/content/docs/upgrading/4-11/4111.mdx": "2025-08-07T22:49:09+08:00", "document/content/docs/upgrading/4-11/4111.mdx": "2025-08-07T22:49:09+08:00",
"document/content/docs/upgrading/4-12/4120.mdx": "2025-08-12T22:22:18+08:00", "document/content/docs/upgrading/4-12/4120.mdx": "2025-08-12T22:45:19+08:00",
"document/content/docs/upgrading/4-8/40.mdx": "2025-08-02T19:38:37+08:00", "document/content/docs/upgrading/4-8/40.mdx": "2025-08-02T19:38:37+08:00",
"document/content/docs/upgrading/4-8/41.mdx": "2025-08-02T19:38:37+08:00", "document/content/docs/upgrading/4-8/41.mdx": "2025-08-02T19:38:37+08:00",
"document/content/docs/upgrading/4-8/42.mdx": "2025-08-02T19:38:37+08:00", "document/content/docs/upgrading/4-8/42.mdx": "2025-08-02T19:38:37+08:00",

View File

@@ -18,6 +18,9 @@ export enum SseResponseEventEnum {
} }
export enum DispatchNodeResponseKeyEnum { export enum DispatchNodeResponseKeyEnum {
answerText = 'answerText', // answer text
reasoningText = 'reasoningText', // reasoning text
skipHandleId = 'skipHandleId', // skip handle id skipHandleId = 'skipHandleId', // skip handle id
nodeResponse = 'responseData', // run node response nodeResponse = 'responseData', // run node response
nodeDispatchUsages = 'nodeDispatchUsages', // the node bill. nodeDispatchUsages = 'nodeDispatchUsages', // the node bill.

View File

@@ -253,6 +253,8 @@ export type DispatchNodeResponseType = {
}; };
export type DispatchNodeResultType<T = {}, ERR = { [NodeOutputKeyEnum.errorText]?: string }> = { export type DispatchNodeResultType<T = {}, ERR = { [NodeOutputKeyEnum.errorText]?: string }> = {
[DispatchNodeResponseKeyEnum.answerText]?: string;
[DispatchNodeResponseKeyEnum.reasoningText]?: string;
[DispatchNodeResponseKeyEnum.skipHandleId]?: string[]; // skip some edge handle id [DispatchNodeResponseKeyEnum.skipHandleId]?: string[]; // skip some edge handle id
[DispatchNodeResponseKeyEnum.nodeResponse]?: DispatchNodeResponseType; // The node response detail [DispatchNodeResponseKeyEnum.nodeResponse]?: DispatchNodeResponseType; // The node response detail
[DispatchNodeResponseKeyEnum.nodeDispatchUsages]?: ChatNodeUsageType[]; // Node total usage [DispatchNodeResponseKeyEnum.nodeDispatchUsages]?: ChatNodeUsageType[]; // Node total usage

View File

@@ -99,6 +99,7 @@ export const dispatchAppRequest = async (props: Props): Promise<Response> => {
answerText: text, answerText: text,
history: completeMessages history: completeMessages
}, },
[DispatchNodeResponseKeyEnum.answerText]: text,
assistantResponses, assistantResponses,
system_memories, system_memories,
[DispatchNodeResponseKeyEnum.nodeResponse]: { [DispatchNodeResponseKeyEnum.nodeResponse]: {

View File

@@ -332,12 +332,16 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
modelType: ModelTypeEnum.llm modelType: ModelTypeEnum.llm
}); });
const trimAnswer = answerText.trim();
return { return {
data: { data: {
answerText: answerText.trim(), answerText: trimAnswer,
reasoningText, reasoningText,
history: chatCompleteMessages history: chatCompleteMessages
}, },
[DispatchNodeResponseKeyEnum.answerText]: isResponseAnswerText ? trimAnswer : undefined,
[DispatchNodeResponseKeyEnum.reasoningText]: aiChatReasoning ? reasoningText : undefined,
[DispatchNodeResponseKeyEnum.nodeResponse]: { [DispatchNodeResponseKeyEnum.nodeResponse]: {
totalPoints: externalProvider.openaiAccount?.key ? 0 : totalPoints, totalPoints: externalProvider.openaiAccount?.key ? 0 : totalPoints,
model: modelName, model: modelName,

View File

@@ -177,6 +177,7 @@ export const dispatchRunAppNode = async (props: Props): Promise<Response> => {
[NodeOutputKeyEnum.answerText]: text, [NodeOutputKeyEnum.answerText]: text,
[NodeOutputKeyEnum.history]: completeMessages [NodeOutputKeyEnum.history]: completeMessages
}, },
[DispatchNodeResponseKeyEnum.answerText]: text,
system_memories, system_memories,
[DispatchNodeResponseKeyEnum.interactive]: workflowInteractiveResponse [DispatchNodeResponseKeyEnum.interactive]: workflowInteractiveResponse
? { ? {

View File

@@ -83,6 +83,7 @@ export const dispatchRunTool = async (props: RunToolProps): Promise<RunToolRespo
}; };
const formatToolId = tool.id.split('-')[1]; const formatToolId = tool.id.split('-')[1];
let answerText = '';
const res = await APIRunSystemTool({ const res = await APIRunSystemTool({
toolId: formatToolId, toolId: formatToolId,
@@ -109,6 +110,7 @@ export const dispatchRunTool = async (props: RunToolProps): Promise<RunToolRespo
}, },
onMessage: ({ type, content }) => { onMessage: ({ type, content }) => {
if (workflowStreamResponse && content) { if (workflowStreamResponse && content) {
answerText = content;
workflowStreamResponse({ workflowStreamResponse({
event: type as unknown as SseResponseEventEnum, event: type as unknown as SseResponseEventEnum,
data: textAdaptGptResponse({ data: textAdaptGptResponse({
@@ -169,6 +171,7 @@ export const dispatchRunTool = async (props: RunToolProps): Promise<RunToolRespo
return { return {
data: result, data: result,
[DispatchNodeResponseKeyEnum.answerText]: answerText,
[DispatchNodeResponseKeyEnum.nodeResponse]: { [DispatchNodeResponseKeyEnum.nodeResponse]: {
toolRes: result, toolRes: result,
moduleLogo: avatar, moduleLogo: avatar,

View File

@@ -125,8 +125,6 @@ type Props = ChatDispatchProps & {
runtimeEdges: RuntimeEdgeItemType[]; runtimeEdges: RuntimeEdgeItemType[];
}; };
type NodeResponseType = DispatchNodeResultType<{ type NodeResponseType = DispatchNodeResultType<{
[NodeOutputKeyEnum.answerText]?: string;
[NodeOutputKeyEnum.reasoningText]?: string;
[key: string]: any; [key: string]: any;
}>; }>;
type NodeResponseCompleteType = Omit<NodeResponseType, 'responseData'> & { type NodeResponseCompleteType = Omit<NodeResponseType, 'responseData'> & {
@@ -234,7 +232,8 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
function pushStore( function pushStore(
{ inputs = [] }: RuntimeNodeItemType, { inputs = [] }: RuntimeNodeItemType,
{ {
data: { answerText = '', reasoningText } = {}, answerText,
reasoningText,
responseData, responseData,
nodeDispatchUsages, nodeDispatchUsages,
toolResponses, toolResponses,
@@ -279,30 +278,20 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
chatAssistantResponse = chatAssistantResponse.concat(assistantResponses); chatAssistantResponse = chatAssistantResponse.concat(assistantResponses);
} else { } else {
if (reasoningText) { if (reasoningText) {
const isResponseReasoningText = inputs.find( chatAssistantResponse.push({
(item) => item.key === NodeInputKeyEnum.aiChatReasoning type: ChatItemValueTypeEnum.reasoning,
)?.value; reasoning: {
if (isResponseReasoningText) { content: reasoningText
chatAssistantResponse.push({ }
type: ChatItemValueTypeEnum.reasoning, });
reasoning: {
content: reasoningText
}
});
}
} }
if (answerText) { if (answerText) {
// save assistant text response chatAssistantResponse.push({
const isResponseAnswerText = type: ChatItemValueTypeEnum.text,
inputs.find((item) => item.key === NodeInputKeyEnum.aiChatIsResponseText)?.value ?? true; text: {
if (isResponseAnswerText) { content: answerText
chatAssistantResponse.push({ }
type: ChatItemValueTypeEnum.text, });
text: {
content: answerText
}
});
}
} }
} }

View File

@@ -33,6 +33,7 @@ export const dispatchAnswer = (props: Record<string, any>): AnswerResponse => {
data: { data: {
[NodeOutputKeyEnum.answerText]: responseText [NodeOutputKeyEnum.answerText]: responseText
}, },
[DispatchNodeResponseKeyEnum.answerText]: responseText,
[DispatchNodeResponseKeyEnum.nodeResponse]: { [DispatchNodeResponseKeyEnum.nodeResponse]: {
textOutput: formatText textOutput: formatText
} }

View File

@@ -344,15 +344,6 @@ export const dispatchHttp468Request = async (props: HttpRequestProps): Promise<H
})(); })();
}); });
if (typeof formatResponse[NodeOutputKeyEnum.answerText] === 'string') {
workflowStreamResponse?.({
event: SseResponseEventEnum.fastAnswer,
data: textAdaptGptResponse({
text: formatResponse[NodeOutputKeyEnum.answerText]
})
});
}
return { return {
data: { data: {
[NodeOutputKeyEnum.httpRawResponse]: rawResponse, [NodeOutputKeyEnum.httpRawResponse]: rawResponse,