Mirror of https://github.com/labring/FastGPT.git (synced 2025-10-15 15:41:05 +00:00)

Fix workflow (#5592)

* fix: fileselector default
* fix: workflow run process
@@ -14,5 +14,7 @@ description: 'FastGPT V4.12.3 更新说明'
 ## 🐛 Fixes

 1. In single-team mode, a user who left could not rejoin the team.
+2. Workflow file upload was enabled by default, but no file output was added on the input side.
+3. With consecutive user-select nodes, branches could not run correctly.

 ## 🔨 Tool updates
@@ -105,7 +105,7 @@
   "document/content/docs/upgrading/4-12/4120.mdx": "2025-08-12T22:45:19+08:00",
   "document/content/docs/upgrading/4-12/4121.mdx": "2025-08-15T22:53:06+08:00",
   "document/content/docs/upgrading/4-12/4122.mdx": "2025-08-27T00:31:33+08:00",
-  "document/content/docs/upgrading/4-12/4123.mdx": "2025-08-29T01:24:19+08:00",
+  "document/content/docs/upgrading/4-12/4123.mdx": "2025-09-04T13:48:03+08:00",
   "document/content/docs/upgrading/4-8/40.mdx": "2025-08-02T19:38:37+08:00",
   "document/content/docs/upgrading/4-8/41.mdx": "2025-08-02T19:38:37+08:00",
   "document/content/docs/upgrading/4-8/42.mdx": "2025-08-02T19:38:37+08:00",
@@ -45,7 +45,7 @@ export const defaultChatInputGuideConfig = {
 };

 export const defaultAppSelectFileConfig: AppFileSelectConfigType = {
-  canSelectFile: true,
+  canSelectFile: false,
   canSelectImg: false,
   maxFiles: 10
 };
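The constants hunk above is the `fix: fileselector default` part of this PR: new apps no longer enable file selection unless it is turned on explicitly. A minimal sketch of the compatibility pattern used later in this commit (see the SimpleEdit hunk), assuming the `AppFileSelectConfigType` shape and import paths shown in the diff; the helper name is illustrative:

```ts
import type { AppFileSelectConfigType } from '@fastgpt/global/core/app/type.d';
import { defaultAppSelectFileConfig } from '@fastgpt/global/core/app/constants';

// Sketch: older apps that never persisted a fileSelectConfig relied on the old
// default (canSelectFile: true), so they fall back to an explicit opt-in.
const withLegacyFileDefault = (
  fileSelectConfig?: AppFileSelectConfigType
): AppFileSelectConfigType =>
  fileSelectConfig ?? {
    ...defaultAppSelectFileConfig, // canSelectFile is now false here
    canSelectFile: true
  };
```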
@@ -8,6 +8,7 @@ type InteractiveBasicType = {
   entryNodeIds: string[];
   memoryEdges: RuntimeEdgeItemType[];
   nodeOutputs: NodeOutputItemType[];
+  skipNodeQueue?: { id: string; skippedNodeIdList: string[] }[]; // Nodes still sitting in the skip queue
   toolParams?: {
     entryNodeIds: string[]; // IDs of the interactive nodes inside the tool, not the entry of the outer workflow
     memoryMessages: ChatCompletionMessageParam[]; // New messages produced during this tool round
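The optional `skipNodeQueue` added above lets an interactive pause carry the not-yet-processed skip branches across requests. A small sketch of the round trip between the in-memory queue (a Map of Sets, as introduced later in this diff) and this serializable shape; the helper names are illustrative, not part of the PR:

```ts
type SerializedSkipNode = { id: string; skippedNodeIdList: string[] };
type SkipQueue = Map<string, { node: { nodeId: string }; skippedNodeIdList: Set<string> }>;

// Serialize when an interactive response is returned to the client.
const serializeSkipQueue = (queue: SkipQueue): SerializedSkipNode[] =>
  Array.from(queue.values()).map((item) => ({
    id: item.node.nodeId,
    skippedNodeIdList: Array.from(item.skippedNodeIdList)
  }));

// Restore when the workflow resumes (mirrors the WorkflowQueue constructor below).
const restoreSkipQueue = (
  cached: SerializedSkipNode[],
  getNode: (id: string) => { nodeId: string } | undefined
): SkipQueue => {
  const queue: SkipQueue = new Map();
  cached.forEach(({ id, skippedNodeIdList }) => {
    const node = getNode(id);
    if (node) queue.set(id, { node, skippedNodeIdList: new Set(skippedNodeIdList) });
  });
  return queue;
};
```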
@@ -42,7 +42,7 @@ import type { RuntimeEdgeItemType } from '@fastgpt/global/core/workflow/type/edg
 import type { ChatNodeUsageType } from '@fastgpt/global/support/wallet/bill/type';
 import { addLog } from '../../../common/system/log';
 import { surrenderProcess } from '../../../common/system/tools';
-import type { DispatchFlowResponse } from './type';
+import type { DispatchFlowResponse, WorkflowDebugResponse } from './type';
 import { removeSystemVariable, rewriteRuntimeWorkFlow } from './utils';
 import { getHandleId } from '@fastgpt/global/core/workflow/utils';
 import { callbackMap } from './constants';
@@ -50,6 +50,7 @@ import { callbackMap } from './constants';
 type Props = Omit<ChatDispatchProps, 'workflowDispatchDeep'> & {
   runtimeNodes: RuntimeNodeItemType[];
   runtimeEdges: RuntimeEdgeItemType[];
+  defaultSkipNodeQueue?: WorkflowDebugResponse['skipNodeQueue'];
 };
 type NodeResponseType = DispatchNodeResultType<{
   [key: string]: any;
@@ -100,6 +101,7 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
   // Init some props
   return runWorkflow({
     ...data,
+    defaultSkipNodeQueue: data.lastInteractive?.skipNodeQueue || data.defaultSkipNodeQueue,
     variables: defaultVariables,
     workflowDispatchDeep: 0
   }).finally(() => {
@@ -112,12 +114,14 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
 type RunWorkflowProps = ChatDispatchProps & {
   runtimeNodes: RuntimeNodeItemType[];
   runtimeEdges: RuntimeEdgeItemType[];
+  defaultSkipNodeQueue?: WorkflowDebugResponse['skipNodeQueue'];
 };
 export const runWorkflow = async (data: RunWorkflowProps): Promise<DispatchFlowResponse> => {
   let {
     res,
     runtimeNodes = [],
     runtimeEdges = [],
+    defaultSkipNodeQueue,
     histories = [],
     variables = {},
     externalProvider,
@@ -135,9 +139,10 @@ export const runWorkflow = async (data: RunWorkflowProps): Promise<DispatchFlowR
     flowResponses: [],
     flowUsages: [],
     debugResponse: {
-      finishedNodes: [],
-      finishedEdges: [],
-      nextStepRunNodes: []
+      memoryEdges: [],
+      entryNodeIds: [],
+      nodeResponses: {},
+      skipNodeQueue: []
     },
     [DispatchNodeResponseKeyEnum.runTimes]: 1,
     [DispatchNodeResponseKeyEnum.assistantResponses]: [],
@@ -151,6 +156,8 @@ export const runWorkflow = async (data: RunWorkflowProps): Promise<DispatchFlowR

   await rewriteRuntimeWorkFlow({ nodes: runtimeNodes, edges: runtimeEdges, lang: data.lang });

+  const isDebugMode = data.mode === 'debug';
+
   /*
     Workflow queue control
     Features:
@@ -176,7 +183,6 @@ export const runWorkflow = async (data: RunWorkflowProps): Promise<DispatchFlowR
   chatAssistantResponse: AIChatItemValueItemType[] = []; // The value will be returned to the user
   chatNodeUsages: ChatNodeUsageType[] = [];
   toolRunResponse: ToolRunResponseItemType; // Run with tool mode. Result will response to tool node.
-  debugNextStepRunNodes: RuntimeNodeItemType[] = []; // Nodes to run in the next debug step
   // Interactive nodes are recorded here and computed only after the workflow has fully finished
   nodeInteractiveResponse:
     | {
@@ -186,22 +192,38 @@ export const runWorkflow = async (data: RunWorkflowProps): Promise<DispatchFlowR
     | undefined;
   system_memories: Record<string, any> = {}; // Workflow node memories

+  // Debug
+  debugNextStepRunNodes: RuntimeNodeItemType[] = []; // Nodes to run in the next debug step
+  debugNodeResponses: WorkflowDebugResponse['nodeResponses'] = {};
+
   // Queue variables
   private activeRunQueue = new Set<string>();
-  private skipNodeQueue: { node: RuntimeNodeItemType; skippedNodeIdList: Set<string> }[] = [];
+  private skipNodeQueue = new Map<
+    string,
+    { node: RuntimeNodeItemType; skippedNodeIdList: Set<string> }
+  >();
   private runningNodeCount = 0;
   private maxConcurrency: number;
   private resolve: (e: WorkflowQueue) => void;

   constructor({
     maxConcurrency = 10,
+    defaultSkipNodeQueue,
     resolve
   }: {
     maxConcurrency?: number;
+    defaultSkipNodeQueue?: WorkflowDebugResponse['skipNodeQueue'];
     resolve: (e: WorkflowQueue) => void;
   }) {
     this.maxConcurrency = maxConcurrency;
     this.resolve = resolve;
+
+    // Init skip node queue
+    defaultSkipNodeQueue?.forEach(({ id, skippedNodeIdList }) => {
+      const node = this.runtimeNodesMap.get(id);
+      if (!node) return;
+      this.addSkipNode(node, new Set(skippedNodeIdList));
+    });
   }

   // Add active node to queue (if already in the queue, it will not be added again)
@@ -217,7 +239,18 @@ export const runWorkflow = async (data: RunWorkflowProps): Promise<DispatchFlowR
   private processActiveNode() {
     // Finish
     if (this.activeRunQueue.size === 0 && this.runningNodeCount === 0) {
-      if (this.skipNodeQueue.length > 0 && !this.nodeInteractiveResponse) {
+      if (isDebugMode) {
+        // No next active node: debug has reached an "about to finish" state, so the skip nodes can be processed
+        if (this.debugNextStepRunNodes.length === 0 && this.skipNodeQueue.size > 0) {
+          this.processSkipNodes();
+        } else {
+          this.resolve(this);
+        }
+        return;
+      }
+
+      // If there is no interactive response, start processing skips (skips tied to an interactive response are handled later)
+      if (this.skipNodeQueue.size > 0 && !this.nodeInteractiveResponse) {
         this.processSkipNodes();
       } else {
         this.resolve(this);
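The termination branch above is the heart of the `fix: workflow run process` change. Distilled into a small pure function (names are illustrative; the real logic lives inside `processActiveNode`): when nothing is active or running, debug mode drains the skip queue only while no next step is scheduled, and a normal run drains it only while no interactive response is pending.

```ts
type FinishAction = 'processSkipNodes' | 'resolve';

const decideFinish = (state: {
  isDebugMode: boolean;
  debugNextStepCount: number;
  skipQueueSize: number;
  hasInteractiveResponse: boolean;
}): FinishAction => {
  if (state.isDebugMode) {
    // Debug is "about to finish": no next active step scheduled, but skips remain.
    return state.debugNextStepCount === 0 && state.skipQueueSize > 0
      ? 'processSkipNodes'
      : 'resolve';
  }
  // Skips tied to a pending interactive response are left for the next run.
  return state.skipQueueSize > 0 && !state.hasInteractiveResponse
    ? 'processSkipNodes'
    : 'resolve';
};
```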
@@ -251,11 +284,19 @@ export const runWorkflow = async (data: RunWorkflowProps): Promise<DispatchFlowR
   }

   private addSkipNode(node: RuntimeNodeItemType, skippedNodeIdList: Set<string>) {
-    this.skipNodeQueue.push({ node, skippedNodeIdList });
+    // Make sure a node is only recorded once in the queue
+    const skipNodeSkippedNodeIdList =
+      this.skipNodeQueue.get(node.nodeId)?.skippedNodeIdList || new Set<string>();
+
+    const concatSkippedNodeIdList = new Set([...skippedNodeIdList, ...skipNodeSkippedNodeIdList]);
+
+    this.skipNodeQueue.set(node.nodeId, { node, skippedNodeIdList: concatSkippedNodeIdList });
   }
   private processSkipNodes() {
-    const skipItem = this.skipNodeQueue.shift();
+    // Take one node and remove it from the queue
+    const skipItem = this.skipNodeQueue.values().next().value;
     if (skipItem) {
+      this.skipNodeQueue.delete(skipItem.node.nodeId);
      this.checkNodeCanRun(skipItem.node, skipItem.skippedNodeIdList).finally(() => {
        this.processActiveNode();
      });
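Switching `skipNodeQueue` from an array to a Map keyed by `nodeId` is what keeps branch nodes from being queued twice: repeated skips of the same node now merge their skipped-edge sources. A standalone sketch of that merge behaviour, with simplified types:

```ts
type SkipEntry = { node: { nodeId: string }; skippedNodeIdList: Set<string> };
const skipNodeQueue = new Map<string, SkipEntry>();

const addSkipNode = (node: { nodeId: string }, skippedNodeIdList: Set<string>) => {
  // A node is recorded at most once; the skipped-source sets are merged instead.
  const previous = skipNodeQueue.get(node.nodeId)?.skippedNodeIdList ?? new Set<string>();
  skipNodeQueue.set(node.nodeId, {
    node,
    skippedNodeIdList: new Set([...skippedNodeIdList, ...previous])
  });
};

addSkipNode({ nodeId: 'branch-1' }, new Set(['edge-a']));
addSkipNode({ nodeId: 'branch-1' }, new Set(['edge-b']));
// skipNodeQueue.size === 1, skippedNodeIdList === Set { 'edge-a', 'edge-b' }
```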
@@ -351,7 +392,7 @@ export const runWorkflow = async (data: RunWorkflowProps): Promise<DispatchFlowR
       runtimeNodes,
       runtimeEdges,
       params,
-      mode: data.mode === 'debug' ? 'test' : data.mode
+      mode: isDebugMode ? 'test' : data.mode
     };

     // run module
@@ -620,18 +661,6 @@ export const runWorkflow = async (data: RunWorkflowProps): Promise<DispatchFlowR
     const nextStepActiveNodes = Array.from(nextStepActiveNodesMap.values());
     const nextStepSkipNodes = Array.from(nextStepSkipNodesMap.values());

-    if (data.mode === 'debug') {
-      this.debugNextStepRunNodes = this.debugNextStepRunNodes.concat(
-        data.lastInteractive
-          ? nextStepActiveNodes
-          : [...nextStepActiveNodes, ...nextStepSkipNodes]
-      );
-      return {
-        nextStepActiveNodes: [],
-        nextStepSkipNodes: []
-      };
-    }
-
     return {
       nextStepActiveNodes,
       nextStepSkipNodes
@@ -690,8 +719,31 @@ export const runWorkflow = async (data: RunWorkflowProps): Promise<DispatchFlowR
         return this.nodeRunWithSkip(node);
       }
     })();

     if (!nodeRunResult) return;
+
+    // Store debug data
+    if (isDebugMode) {
+      if (status === 'run') {
+        this.debugNodeResponses[node.nodeId] = {
+          nodeId: node.nodeId,
+          type: 'run',
+          interactiveResponse: nodeRunResult.result[DispatchNodeResponseKeyEnum.interactive],
+          response: nodeRunResult.result[DispatchNodeResponseKeyEnum.nodeResponse]
+        };
+      } else if (status === 'skip') {
+        this.debugNodeResponses[node.nodeId] = {
+          nodeId: node.nodeId,
+          type: 'skip',
+          response: nodeRunResult.result[DispatchNodeResponseKeyEnum.nodeResponse]
+        };
+      }
+    }
+    // If a node has run as active, it must be removed from the skip queue
+    if (status === 'run') {
+      this.skipNodeQueue.delete(node.nodeId);
+    }

     /*
       Special case:
       skipEdges tell us that a branch node was run.
@@ -704,22 +756,6 @@ export const runWorkflow = async (data: RunWorkflowProps): Promise<DispatchFlowR
       skippedNodeIdList.add(node.nodeId);
     }

-    // In the current version, only one interactive node is allowed at the same time
-    const interactiveResponse = nodeRunResult.result?.[DispatchNodeResponseKeyEnum.interactive];
-    if (interactiveResponse) {
-      pushStore(nodeRunResult.result);
-
-      if (data.mode === 'debug') {
-        this.debugNextStepRunNodes = this.debugNextStepRunNodes.concat([nodeRunResult.node]);
-      }
-
-      this.nodeInteractiveResponse = {
-        entryNodeIds: [nodeRunResult.node.nodeId],
-        interactiveResponse
-      };
-      return;
-    }
-
     // Update the node output at the end of the run and get the next nodes
     const { nextStepActiveNodes, nextStepSkipNodes } = nodeOutput(
       nodeRunResult.node,
@@ -730,10 +766,26 @@ export const runWorkflow = async (data: RunWorkflowProps): Promise<DispatchFlowR
       this.addSkipNode(node, skippedNodeIdList);
     });

-    // Run next nodes
-    nextStepActiveNodes.forEach((node) => {
-      this.addActiveNode(node.nodeId);
-    });
+    // In the current version, only one interactive node is allowed at the same time
+    const interactiveResponse = nodeRunResult.result[DispatchNodeResponseKeyEnum.interactive];
+    if (interactiveResponse) {
+      if (isDebugMode) {
+        this.debugNextStepRunNodes = this.debugNextStepRunNodes.concat([nodeRunResult.node]);
+      }
+
+      this.nodeInteractiveResponse = {
+        entryNodeIds: [nodeRunResult.node.nodeId],
+        interactiveResponse
+      };
+      return;
+    } else if (isDebugMode) {
+      // When stepping in debug mode, the next step adds the active nodes itself
+      this.debugNextStepRunNodes = this.debugNextStepRunNodes.concat(nextStepActiveNodes);
+    } else {
+      nextStepActiveNodes.forEach((node) => {
+        this.addActiveNode(node.nodeId);
+      });
+    }
   }

   /* Have interactive result, computed edges and node outputs */
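The branch above decides what happens after a node finishes: an interactive response short-circuits everything, debug mode only records the candidates for the next manual step, and a normal run enqueues them immediately. As a compact sketch with illustrative names:

```ts
type NextDispatch = 'recordInteractive' | 'recordDebugNextStep' | 'enqueueActive';

const dispatchNextNodes = (state: {
  hasInteractiveResponse: boolean;
  isDebugMode: boolean;
}): NextDispatch => {
  if (state.hasInteractiveResponse) return 'recordInteractive';
  if (state.isDebugMode) return 'recordDebugNextStep'; // the next debug step activates them itself
  return 'enqueueActive';
};
```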
@@ -760,6 +812,10 @@ export const runWorkflow = async (data: RunWorkflowProps): Promise<DispatchFlowR

     const interactiveResult: WorkflowInteractiveResponseType = {
       ...interactiveResponse,
+      skipNodeQueue: Array.from(this.skipNodeQueue.values()).map((item) => ({
+        id: item.node.nodeId,
+        skippedNodeIdList: Array.from(item.skippedNodeIdList)
+      })),
       entryNodeIds,
       memoryEdges: runtimeEdges.map((edge) => ({
         ...edge,
@@ -781,6 +837,22 @@ export const runWorkflow = async (data: RunWorkflowProps): Promise<DispatchFlowR
       interactive: interactiveResult
     };
   }
+  getDebugResponse(): WorkflowDebugResponse {
+    const entryNodeIds = this.debugNextStepRunNodes.map((item) => item.nodeId);
+
+    return {
+      memoryEdges: runtimeEdges.map((edge) => ({
+        ...edge,
+        status: entryNodeIds.includes(edge.target) ? 'active' : edge.status
+      })),
+      entryNodeIds,
+      nodeResponses: this.debugNodeResponses,
+      skipNodeQueue: Array.from(this.skipNodeQueue.values()).map((item) => ({
+        id: item.node.nodeId,
+        skippedNodeIdList: Array.from(item.skippedNodeIdList)
+      }))
+    };
+  }
 }

 // Start process with initInput
@@ -799,7 +871,8 @@ export const runWorkflow = async (data: RunWorkflowProps): Promise<DispatchFlowR

   const workflowQueue = await new Promise<WorkflowQueue>((resolve) => {
     const workflowQueue = new WorkflowQueue({
-      resolve
+      resolve,
+      defaultSkipNodeQueue
     });

     entryNodes.forEach((node) => {
@@ -833,11 +906,7 @@ export const runWorkflow = async (data: RunWorkflowProps): Promise<DispatchFlowR
   return {
     flowResponses: workflowQueue.chatResponses,
     flowUsages: workflowQueue.chatNodeUsages,
-    debugResponse: {
-      finishedNodes: runtimeNodes,
-      finishedEdges: runtimeEdges,
-      nextStepRunNodes: workflowQueue.debugNextStepRunNodes
-    },
+    debugResponse: workflowQueue.getDebugResponse(),
     workflowInteractiveResponse: interactiveResult,
     [DispatchNodeResponseKeyEnum.runTimes]: workflowQueue.workflowRunTimes,
     [DispatchNodeResponseKeyEnum.assistantResponses]: mergeAssistantResponseAnswerText(
|
@@ -13,14 +13,24 @@ import type { WorkflowInteractiveResponseType } from '@fastgpt/global/core/workf
|
|||||||
import type { RuntimeEdgeItemType } from '@fastgpt/global/core/workflow/type/edge';
|
import type { RuntimeEdgeItemType } from '@fastgpt/global/core/workflow/type/edge';
|
||||||
import type { ChatNodeUsageType } from '@fastgpt/global/support/wallet/bill/type';
|
import type { ChatNodeUsageType } from '@fastgpt/global/support/wallet/bill/type';
|
||||||
|
|
||||||
|
export type WorkflowDebugResponse = {
|
||||||
|
memoryEdges: RuntimeEdgeItemType[];
|
||||||
|
entryNodeIds: string[]; // Next step entry nodes
|
||||||
|
nodeResponses: Record<
|
||||||
|
string,
|
||||||
|
{
|
||||||
|
nodeId: string;
|
||||||
|
type: 'skip' | 'run';
|
||||||
|
response?: ChatHistoryItemResType;
|
||||||
|
interactiveResponse?: InteractiveNodeResponseType;
|
||||||
|
}
|
||||||
|
>;
|
||||||
|
skipNodeQueue?: { id: string; skippedNodeIdList: string[] }[]; // Cache
|
||||||
|
};
|
||||||
export type DispatchFlowResponse = {
|
export type DispatchFlowResponse = {
|
||||||
flowResponses: ChatHistoryItemResType[];
|
flowResponses: ChatHistoryItemResType[];
|
||||||
flowUsages: ChatNodeUsageType[];
|
flowUsages: ChatNodeUsageType[];
|
||||||
debugResponse: {
|
debugResponse: WorkflowDebugResponse;
|
||||||
finishedNodes: RuntimeNodeItemType[];
|
|
||||||
finishedEdges: RuntimeEdgeItemType[];
|
|
||||||
nextStepRunNodes: RuntimeNodeItemType[];
|
|
||||||
};
|
|
||||||
workflowInteractiveResponse?: WorkflowInteractiveResponseType;
|
workflowInteractiveResponse?: WorkflowInteractiveResponseType;
|
||||||
[DispatchNodeResponseKeyEnum.toolResponses]: ToolRunResponseItemType;
|
[DispatchNodeResponseKeyEnum.toolResponses]: ToolRunResponseItemType;
|
||||||
[DispatchNodeResponseKeyEnum.assistantResponses]: AIChatItemValueItemType[];
|
[DispatchNodeResponseKeyEnum.assistantResponses]: AIChatItemValueItemType[];
|
||||||
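With `debugResponse` collapsed to `WorkflowDebugResponse`, the client no longer receives full `finishedNodes`/`finishedEdges` arrays; it gets per-node records keyed by nodeId. A sketch of how a consumer might fold them into UI state, assuming the type defined above (the `setStatus` callback is illustrative; the import path is the one used elsewhere in this PR):

```ts
import type { WorkflowDebugResponse } from '@fastgpt/service/core/workflow/dispatch/type';

const applyDebugResponses = (
  nodeResponses: WorkflowDebugResponse['nodeResponses'],
  setStatus: (nodeId: string, status: 'success' | 'skipped') => void
) => {
  Object.values(nodeResponses).forEach((item) => {
    // 'run' nodes render as success with their response; 'skip' nodes render as skipped.
    setStatus(item.nodeId, item.type === 'run' ? 'success' : 'skipped');
  });
};
```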
|
@@ -17,7 +17,6 @@ import { useTranslation } from 'next-i18next';
|
|||||||
import type { AppFileSelectConfigType } from '@fastgpt/global/core/app/type.d';
|
import type { AppFileSelectConfigType } from '@fastgpt/global/core/app/type.d';
|
||||||
import MyModal from '@fastgpt/web/components/common/MyModal';
|
import MyModal from '@fastgpt/web/components/common/MyModal';
|
||||||
import MySlider from '@/components/Slider';
|
import MySlider from '@/components/Slider';
|
||||||
import { defaultAppSelectFileConfig } from '@fastgpt/global/core/app/constants';
|
|
||||||
import ChatFunctionTip from './Tip';
|
import ChatFunctionTip from './Tip';
|
||||||
import FormLabel from '@fastgpt/web/components/common/MyBox/FormLabel';
|
import FormLabel from '@fastgpt/web/components/common/MyBox/FormLabel';
|
||||||
import { useMount } from 'ahooks';
|
import { useMount } from 'ahooks';
|
||||||
@@ -25,6 +24,7 @@ import { useSystemStore } from '@/web/common/system/useSystemStore';
 import QuestionTip from '@fastgpt/web/components/common/MyTooltip/QuestionTip';
 import MyTag from '@fastgpt/web/components/common/Tag/index';
 import MyDivider from '@fastgpt/web/components/common/MyDivider';
+import { defaultAppSelectFileConfig } from '@fastgpt/global/core/app/constants';

 const FileSelect = ({
   forbidVision = false,
|
@@ -5,21 +5,18 @@ import type { WorkflowInteractiveResponseType } from '@fastgpt/global/core/workf
|
|||||||
import { StoreNodeItemType } from '@fastgpt/global/core/workflow/type';
|
import { StoreNodeItemType } from '@fastgpt/global/core/workflow/type';
|
||||||
import type { RuntimeEdgeItemType } from '@fastgpt/global/core/workflow/type/edge';
|
import type { RuntimeEdgeItemType } from '@fastgpt/global/core/workflow/type/edge';
|
||||||
import { StoreEdgeItemType } from '@fastgpt/global/core/workflow/type/edge';
|
import { StoreEdgeItemType } from '@fastgpt/global/core/workflow/type/edge';
|
||||||
|
import type { WorkflowDebugResponse } from '@fastgpt/service/core/workflow/dispatch/type';
|
||||||
|
|
||||||
export type PostWorkflowDebugProps = {
|
export type PostWorkflowDebugProps = {
|
||||||
nodes: RuntimeNodeItemType[];
|
nodes: RuntimeNodeItemType[];
|
||||||
edges: RuntimeEdgeItemType[];
|
edges: RuntimeEdgeItemType[];
|
||||||
|
skipNodeQueue?: WorkflowDebugResponse['skipNodeQueue'];
|
||||||
variables: Record<string, any>;
|
variables: Record<string, any>;
|
||||||
appId: string;
|
appId: string;
|
||||||
query?: UserChatItemValueItemType[];
|
query?: UserChatItemValueItemType[];
|
||||||
history?: ChatItemType[];
|
history?: ChatItemType[];
|
||||||
};
|
};
|
||||||
|
|
||||||
export type PostWorkflowDebugResponse = {
|
export type PostWorkflowDebugResponse = WorkflowDebugResponse & {
|
||||||
finishedNodes: RuntimeNodeItemType[];
|
|
||||||
finishedEdges: RuntimeEdgeItemType[];
|
|
||||||
nextStepRunNodes: RuntimeNodeItemType[];
|
|
||||||
flowResponses: ChatHistoryItemResType[];
|
|
||||||
workflowInteractiveResponse?: WorkflowInteractiveResponseType;
|
|
||||||
newVariables: Record<string, any>;
|
newVariables: Record<string, any>;
|
||||||
};
|
};
|
||||||
|
@@ -31,7 +31,6 @@ import FormLabel from '@fastgpt/web/components/common/MyBox/FormLabel';
 import VariableTip from '@/components/common/Textarea/MyTextarea/VariableTip';
 import { getWebLLMModel } from '@/web/common/system/utils';
 import ToolSelect from './components/ToolSelect';
-import { useSystemStore } from '@/web/common/system/useSystemStore';
 import OptimizerPopover from '@/components/common/PromptEditor/OptimizerPopover';

 const DatasetSelectModal = dynamic(() => import('@/components/core/app/DatasetSelectModal'));
@@ -148,7 +147,7 @@ const EditForm = ({
     },
     [appForm.aiSettings.systemPrompt, setAppForm]
   );
+  console.log(appForm.chatConfig.fileSelectConfig);
   return (
     <>
       <Box>
|
@@ -11,6 +11,7 @@ import { type SimpleAppSnapshotType, useSimpleAppSnapshots } from './useSnapshot
|
|||||||
import { useDebounceEffect, useMount } from 'ahooks';
|
import { useDebounceEffect, useMount } from 'ahooks';
|
||||||
import { v1Workflow2V2 } from '@/web/core/workflow/adapt';
|
import { v1Workflow2V2 } from '@/web/core/workflow/adapt';
|
||||||
import { getAppConfigByDiff } from '@/web/core/app/diff';
|
import { getAppConfigByDiff } from '@/web/core/app/diff';
|
||||||
|
import { defaultAppSelectFileConfig } from '@fastgpt/global/core/app/constants';
|
||||||
|
|
||||||
const Edit = dynamic(() => import('./Edit'));
|
const Edit = dynamic(() => import('./Edit'));
|
||||||
const Logs = dynamic(() => import('../Logs/index'));
|
const Logs = dynamic(() => import('../Logs/index'));
|
||||||
@@ -79,7 +80,13 @@ const SimpleEdit = () => {
     if (past.length === 0) {
       const appForm = appWorkflow2Form({
         nodes: appDetail.modules,
-        chatConfig: appDetail.chatConfig
+        chatConfig: {
+          ...appDetail.chatConfig,
+          fileSelectConfig: appDetail.chatConfig.fileSelectConfig || {
+            ...defaultAppSelectFileConfig,
+            canSelectFile: true
+          }
+        }
       });
       saveSnapshot({
         appForm,
@@ -127,11 +127,9 @@ const NodeDebugResponse = ({ nodeId, debugResult }: NodeDebugResponseProps) => {
       }
     ];

-    const lastInteractive = getLastInteractiveValue(mockHistory);
     onNextNodeDebug({
       ...workflowDebugData,
-      // Rewrite runtimeEdges
-      runtimeEdges: storeEdges2RuntimeEdges(workflowDebugData.runtimeEdges, lastInteractive),
+      runtimeEdges: workflowDebugData.runtimeEdges,
       query: updatedQuery,
       history: mockHistory
     });
@@ -189,7 +187,7 @@ const NodeDebugResponse = ({ nodeId, debugResult }: NodeDebugResponseProps) => {
           <Box fontWeight={'bold'} flex={'1'}>
             {t('common:core.workflow.debug.Run result')}
           </Box>
-          {workflowDebugData?.nextRunNodes.length !== 0 && (
+          {workflowDebugData?.entryNodeIds.length !== 0 && (
            <PopoverConfirm
              Trigger={
                <Button
@@ -209,8 +207,8 @@ const NodeDebugResponse = ({ nodeId, debugResult }: NodeDebugResponseProps) => {
           <>
             {(debugResult.status === 'success' || debugResult.status === 'skipped') &&
               !debugResult.isExpired &&
-              workflowDebugData?.nextRunNodes &&
-              workflowDebugData.nextRunNodes.length > 0 && (
+              workflowDebugData?.entryNodeIds &&
+              workflowDebugData.entryNodeIds.length > 0 && (
                <Button
                  ml={2}
                  size={'sm'}
@@ -221,8 +219,8 @@ const NodeDebugResponse = ({ nodeId, debugResult }: NodeDebugResponseProps) => {
                {t('common:next_step')}
              </Button>
            )}
-            {workflowDebugData?.nextRunNodes &&
-              workflowDebugData?.nextRunNodes.length === 0 && (
+            {workflowDebugData?.entryNodeIds &&
+              workflowDebugData?.entryNodeIds.length === 0 && (
                <Button ml={2} size={'sm'} variant={'primary'} onClick={onStopNodeDebug}>
                  {t('common:core.workflow.debug.Done')}
                </Button>
@@ -54,12 +54,12 @@ import { cloneDeep } from 'lodash';
 import { type AppVersionSchemaType } from '@fastgpt/global/core/app/version';
 import WorkflowInitContextProvider, { WorkflowNodeEdgeContext } from './workflowInitContext';
 import WorkflowEventContextProvider from './workflowEventContext';
-import { getAppConfigByDiff } from '@/web/core/app/diff';
 import WorkflowStatusContextProvider from './workflowStatusContext';
 import { type ChatItemType, type UserChatItemValueItemType } from '@fastgpt/global/core/chat/type';
 import { type WorkflowInteractiveResponseType } from '@fastgpt/global/core/workflow/template/system/interactive/type';
 import { FlowNodeOutputTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
 import { useChatStore } from '@/web/core/chat/context/useChatStore';
+import type { WorkflowDebugResponse } from '@fastgpt/service/core/workflow/dispatch/type';

 /*
   Context
@@ -266,7 +266,9 @@ type WorkflowContextType = {
 export type DebugDataType = {
   runtimeNodes: RuntimeNodeItemType[];
   runtimeEdges: RuntimeEdgeItemType[];
-  nextRunNodes: RuntimeNodeItemType[];
+  entryNodeIds: string[];
+  skipNodeQueue?: WorkflowDebugResponse['skipNodeQueue'];
+
   variables: Record<string, any>;
   history?: ChatItemType[];
   query?: UserChatItemValueItemType[];
@@ -686,112 +688,67 @@ const WorkflowContextProvider = ({
       }))
     );

-    // 2. Set isEntry field and get entryNodes
+    // 2. Set isEntry field and get entryNodes, and set running status
     const runtimeNodes = debugData.runtimeNodes.map((item) => ({
       ...item,
-      isEntry: debugData.nextRunNodes.some((node) => node.nodeId === item.nodeId)
+      isEntry: debugData.entryNodeIds.some((id) => id === item.nodeId)
     }));
-    const entryNodes = runtimeNodes.filter((item) => item.isEntry);
-
-    const runtimeNodeStatus: Record<string, string> = entryNodes
-      .map((node) => {
-        const status = checkNodeRunStatus({
-          node,
-          nodesMap: new Map(runtimeNodes.map((item) => [item.nodeId, item])),
-          runtimeEdges: debugData?.runtimeEdges || []
-        });
-
-        return {
-          nodeId: node.nodeId,
-          status
-        };
-      })
-      .reduce(
-        (acc, cur) => ({
-          ...acc,
-          [cur.nodeId]: cur.status
-        }),
-        {}
-      );
-
-    // 3. Set entry node status to running
-    entryNodes.forEach((node) => {
-      if (runtimeNodeStatus[node.nodeId] !== 'wait') {
+    const entryNodes = runtimeNodes.filter((item) => {
+      if (item.isEntry) {
         onChangeNode({
-          nodeId: node.nodeId,
+          nodeId: item.nodeId,
           type: 'attr',
           key: 'debugResult',
           value: defaultRunningStatus
         });
+        return true;
       }
     });

     try {
-      // 4. Run one step
-      const {
-        finishedEdges,
-        finishedNodes,
-        nextStepRunNodes,
-        flowResponses,
-        newVariables,
-        workflowInteractiveResponse
-      } = await postWorkflowDebug({
-        nodes: runtimeNodes,
-        edges: debugData.runtimeEdges,
-        variables: {
-          appId,
-          cTime: formatTime2YMDHMW(),
-          ...debugData.variables
-        },
-        query: debugData.query, // pass the query parameter
-        history: debugData.history,
-        appId
-      });
+      // 3. Run one step
+      const { memoryEdges, entryNodeIds, skipNodeQueue, nodeResponses, newVariables } =
+        await postWorkflowDebug({
+          nodes: runtimeNodes,
+          edges: debugData.runtimeEdges,
+          skipNodeQueue: debugData.skipNodeQueue,
+          variables: {
+            appId,
+            cTime: formatTime2YMDHMW(),
+            ...debugData.variables
+          },
+          query: debugData.query, // pass the query parameter
+          history: debugData.history,
+          appId
+        });

-      // 5. Store debug result
+      // 4. Store debug result
       setWorkflowDebugData({
-        runtimeNodes: finishedNodes,
-        // edges need to save status
-        runtimeEdges: finishedEdges,
-        nextRunNodes: nextStepRunNodes,
-        variables: newVariables,
-        workflowInteractiveResponse: workflowInteractiveResponse
+        runtimeNodes: debugData.runtimeNodes,
+        runtimeEdges: memoryEdges,
+        entryNodeIds,
+        skipNodeQueue,
+        variables: newVariables
      });

-      // 6. selected entry node and Update entry node debug result
+      // 5. selected entry node and Update entry node debug result
       setNodes((state) =>
         state.map((node) => {
           const isEntryNode = entryNodes.some((item) => item.nodeId === node.data.nodeId);

-          if (!isEntryNode || runtimeNodeStatus[node.data.nodeId] === 'wait') return node;
-
-          const result = flowResponses.find((item) => item.nodeId === node.data.nodeId);
-
-          if (runtimeNodeStatus[node.data.nodeId] === 'skip') {
-            return {
-              ...node,
-              selected: isEntryNode,
-              data: {
-                ...node.data,
-                debugResult: {
-                  status: 'skipped',
-                  showResult: true,
-                  isExpired: false
-                }
-              }
-            };
-          }
+          const result = nodeResponses[node.data.nodeId];
+          if (!result) return node;

           return {
             ...node,
-            selected: isEntryNode,
+            selected: result.type === 'run' && isEntryNode,
             data: {
               ...node.data,
               debugResult: {
-                status: 'success',
-                response: result,
+                status: result.type === 'run' ? 'success' : 'skipped',
+                response: result.response,
                 showResult: true,
                 isExpired: false,
-                workflowInteractiveResponse: workflowInteractiveResponse
+                workflowInteractiveResponse: result.interactiveResponse
               }
             }
           };
@@ -799,13 +756,9 @@ const WorkflowContextProvider = ({
       );

       // Check for an empty response(Skip node)
-      if (
-        !workflowInteractiveResponse &&
-        flowResponses.length === 0 &&
-        nextStepRunNodes.length > 0
-      ) {
-        onNextNodeDebug(debugData);
-      }
+      // if (!workflowInteractiveResponse && flowResponses.length === 0 && entryNodeIds.length > 0) {
+      //   onNextNodeDebug(debugData);
+      // }
     } catch (error) {
       entryNodes.forEach((node) => {
         onChangeNode({
@@ -856,7 +809,10 @@ const WorkflowContextProvider = ({
     const data: DebugDataType = {
       runtimeNodes,
       runtimeEdges,
-      nextRunNodes: runtimeNodes.filter((node) => node.nodeId === entryNodeId),
+      entryNodeIds: runtimeNodes
+        .filter((node) => node.nodeId === entryNodeId)
+        .map((node) => node.nodeId),
+      skipNodeQueue: [],
       variables,
       query,
       history
@@ -21,6 +21,7 @@ async function handler(
   const {
     nodes = [],
     edges = [],
+    skipNodeQueue,
    variables = {},
    appId,
    query = [],
@@ -47,34 +48,34 @@ async function handler(

   // auth balance
   const { timezone, externalProvider } = await getUserChatInfoAndAuthTeamPoints(tmbId);
-  const lastInteractive = getLastInteractiveValue(history);
+  const interactive = getLastInteractiveValue(history);

   /* start process */
-  const { flowUsages, flowResponses, debugResponse, newVariables, workflowInteractiveResponse } =
-    await dispatchWorkFlow({
+  const { flowUsages, debugResponse, newVariables } = await dispatchWorkFlow({
     res,
     lang: getLocale(req),
     requestOrigin: req.headers.origin,
     mode: 'debug',
     timezone,
     externalProvider,
     uid: tmbId,
     runningAppInfo: {
       id: app._id,
       teamId: app.teamId,
       tmbId: app.tmbId
     },
     runningUserInfo: await getRunningUserInfoByTmbId(tmbId),
     runtimeNodes: nodes,
     runtimeEdges: edges,
-    lastInteractive,
+    defaultSkipNodeQueue: skipNodeQueue,
+    lastInteractive: interactive,
     variables,
     query: query,
     chatConfig: defaultApp.chatConfig,
     histories: history,
     stream: false,
     maxRunTimes: WORKFLOW_MAX_RUN_TIMES
   });

   createChatUsage({
     appName: `${app.name}-Debug`,
@@ -86,10 +87,8 @@ async function handler(
   });

   return {
-    ...debugResponse,
-    newVariables,
-    flowResponses,
-    workflowInteractiveResponse
+    ...debugResponse!,
+    newVariables
   };
 }
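End to end, a debug step now round-trips the skip queue through this API: the response's `skipNodeQueue` is cached by the client and sent back on the next step, where the handler passes it to `dispatchWorkFlow` as `defaultSkipNodeQueue`. A hedged sketch of that loop; the endpoint path and the `postWorkflowDebug` wrapper are assumptions, only the field names come from this PR:

```ts
type SkipNodeQueue = { id: string; skippedNodeIdList: string[] }[];

// Assumed thin wrapper around the debug endpoint; the path is illustrative.
const postWorkflowDebug = async (body: {
  appId: string;
  nodes: unknown[];
  edges: unknown[];
  skipNodeQueue?: SkipNodeQueue;
  variables: Record<string, any>;
}): Promise<{ entryNodeIds: string[]; skipNodeQueue?: SkipNodeQueue }> => {
  const res = await fetch('/api/core/workflow/debug', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body)
  });
  return res.json();
};

// Each "next step" replays the queue returned by the previous step.
let cachedSkipQueue: SkipNodeQueue | undefined = [];
const runNextStep = async (appId: string, nodes: unknown[], edges: unknown[]) => {
  const data = await postWorkflowDebug({
    appId,
    nodes,
    edges,
    skipNodeQueue: cachedSkipQueue,
    variables: {}
  });
  cachedSkipQueue = data.skipNodeQueue;
  return data.entryNodeIds;
};
```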
|
||||||
|
|
||||||
|
Reference in New Issue
Block a user