Mirror of https://github.com/labring/FastGPT.git
4.8.10 test (#2573)
* feat: more debug response
* fix: debug edge status
* perf: doc
* fix: workflow edge check
* perf: i18n
* package.json
* perf: markdown mask
@@ -254,13 +254,15 @@ export const runToolWithFunctionCall = async (
   // console.log(tokens, 'tool');

   // Run tool status
-  workflowStreamResponse?.({
-    event: SseResponseEventEnum.flowNodeStatus,
-    data: {
-      status: 'running',
-      name: node.name
-    }
-  });
+  if (node.showStatus) {
+    workflowStreamResponse?.({
+      event: SseResponseEventEnum.flowNodeStatus,
+      data: {
+        status: 'running',
+        name: node.name
+      }
+    });
+  }

   // tool assistant
   const toolAssistants = toolsRunResponse
@@ -258,13 +258,15 @@ export const runToolWithPromptCall = async (
   })();

   // Run tool status
-  workflowStreamResponse?.({
-    event: SseResponseEventEnum.flowNodeStatus,
-    data: {
-      status: 'running',
-      name: node.name
-    }
-  });
+  if (node.showStatus) {
+    workflowStreamResponse?.({
+      event: SseResponseEventEnum.flowNodeStatus,
+      data: {
+        status: 'running',
+        name: node.name
+      }
+    });
+  }

   // Merge the tool-call results and store them in functionCall format.
   const assistantToolMsgParams: ChatCompletionAssistantMessageParam = {
@@ -265,13 +265,15 @@ export const runToolWithToolChoice = async (
   // console.log(tokens, 'tool');

   // Run tool status
-  workflowStreamResponse?.({
-    event: SseResponseEventEnum.flowNodeStatus,
-    data: {
-      status: 'running',
-      name: node.name
-    }
-  });
+  if (node.showStatus) {
+    workflowStreamResponse?.({
+      event: SseResponseEventEnum.flowNodeStatus,
+      data: {
+        status: 'running',
+        name: node.name
+      }
+    });
+  }

   // tool assistant
   const toolAssistants = toolsRunResponse
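All three tool runners (runToolWithFunctionCall, runToolWithPromptCall, runToolWithToolChoice) receive the same fix: the flowNodeStatus SSE event is now gated behind node.showStatus, so nodes that hide their status no longer push debug events into the stream. A minimal sketch of the pattern with simplified stand-in types (the enum, event shape, and node type below are reductions for illustration, not FastGPT's actual definitions):

// Simplified stand-ins for the FastGPT types (assumed shapes).
enum SseResponseEventEnum {
  flowNodeStatus = 'flowNodeStatus'
}

type FlowNodeStatusEvent = {
  event: SseResponseEventEnum;
  data: { status: 'running'; name: string };
};

type WorkflowStreamResponse = ((e: FlowNodeStatusEvent) => void) | undefined;

// Emit a "running" status only for nodes that opted in via showStatus.
const pushRunningStatus = (
  workflowStreamResponse: WorkflowStreamResponse,
  node: { name: string; showStatus?: boolean }
) => {
  if (!node.showStatus) return;
  workflowStreamResponse?.({
    event: SseResponseEventEnum.flowNodeStatus,
    data: { status: 'running', name: node.name }
  });
};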
@@ -310,9 +310,11 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
     const flat = result.flat().filter(Boolean) as unknown as {
       node: RuntimeNodeItemType;
       runStatus: 'run' | 'skip';
       result: Record<string, any>;
     }[];
     if (flat.length === 0) return;
+    // If there are no running nodes, the workflow is complete
+    if (!flat.some((item) => item.runStatus === 'run')) return;

     // Update the node output at the end of the run and get the next nodes
     const nextNodes = flat.map((item) => nodeOutput(item.node, item.result)).flat();
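The new guard matters because skipped nodes now contribute entries to flat: a batch made up entirely of skips has a non-zero length, so the old flat.length === 0 check alone would keep recursing, while the new predicate correctly treats "no node actually ran" as completion. A small self-contained check (hypothetical data, simplified shape):

type FlatEntry = { runStatus: 'run' | 'skip' };

// A batch in which every node was skipped.
const flat: FlatEntry[] = [{ runStatus: 'skip' }, { runStatus: 'skip' }];

console.log(flat.length === 0); // false: the old guard would not stop here
console.log(!flat.some((item) => item.runStatus === 'run')); // true: the new guard ends the workflow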
@@ -454,6 +456,7 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons

       return {
         node,
+        runStatus: 'run',
         result: {
           ...dispatchRes,
           [DispatchNodeResponseKeyEnum.nodeResponse]: formatResponseData
@@ -467,6 +470,7 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons

       return {
         node,
+        runStatus: 'skip',
         result: {
           [DispatchNodeResponseKeyEnum.skipHandleId]: targetEdges.map((item) => item.sourceHandle)
         }
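Together with the termination check above, the runStatus tag makes each entry a small discriminated union: run entries carry the node's outputs plus the formatted nodeResponse, while skip entries only record which source handles to mark as skipped. A reduced sketch (the skipHandleId key follows the diff; everything else is simplified for illustration):

type RunEntry = {
  node: { name: string };
  runStatus: 'run';
  result: Record<string, any>; // node outputs plus the formatted nodeResponse
};

type SkipEntry = {
  node: { name: string };
  runStatus: 'skip';
  result: { skipHandleId: string[] }; // source handles of the skipped target edges
};

type FlatEntry = RunEntry | SkipEntry;

// Narrowing on runStatus lets the dispatcher handle the two cases separately.
const describe = (entry: FlatEntry) =>
  entry.runStatus === 'run'
    ? `${entry.node.name}: ran`
    : `${entry.node.name}: skipped ${entry.result.skipHandleId.length} handle(s)`;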
@@ -1,14 +1,18 @@
 import { chatValue2RuntimePrompt } from '@fastgpt/global/core/chat/adapt';
 import { NodeInputKeyEnum, NodeOutputKeyEnum } from '@fastgpt/global/core/workflow/constants';
-import type { ModuleDispatchProps } from '@fastgpt/global/core/workflow/runtime/type';
+import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
+import type {
+  DispatchNodeResultType,
+  ModuleDispatchProps
+} from '@fastgpt/global/core/workflow/runtime/type';

 export type UserChatInputProps = ModuleDispatchProps<{
   [NodeInputKeyEnum.userChatInput]: string;
 }>;
-type Response = {
+type Response = DispatchNodeResultType<{
   [NodeOutputKeyEnum.userChatInput]: string;
   [NodeOutputKeyEnum.userFiles]: string[];
-};
+}>;

 export const dispatchWorkflowStart = (props: Record<string, any>): Response => {
   const {
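The Response type moves from a bare output record to DispatchNodeResultType<...>. Judging by the return value in the next hunk, the wrapper layers dispatcher-level keys such as nodeResponse on top of the node's own outputs. A rough guess at its shape (an inference from this diff, not FastGPT's actual definition):

// Assumed shape: node outputs intersected with dispatcher bookkeeping keys.
type DispatchNodeResultType<Outputs> = Outputs & {
  nodeResponse?: Record<string, any>; // stands in for [DispatchNodeResponseKeyEnum.nodeResponse]
};

type WorkflowStartResponse = DispatchNodeResultType<{
  userChatInput: string; // stands in for [NodeOutputKeyEnum.userChatInput]
  userFiles: string[]; // stands in for [NodeOutputKeyEnum.userFiles]
}>;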
@@ -19,6 +23,7 @@ export const dispatchWorkflowStart = (props: Record<string, any>): Response => {
   const { text, files } = chatValue2RuntimePrompt(query);

   return {
+    [DispatchNodeResponseKeyEnum.nodeResponse]: {},
     [NodeInputKeyEnum.userChatInput]: text || userChatInput,
     [NodeOutputKeyEnum.userFiles]: files
       .map((item) => {
@@ -13,7 +13,6 @@ import {
 import { ModuleDispatchProps } from '@fastgpt/global/core/workflow/runtime/type';
 import { getElseIFLabel, getHandleId } from '@fastgpt/global/core/workflow/utils';
 import { getReferenceVariableValue } from '@fastgpt/global/core/workflow/runtime/utils';
-import { replaceRegChars } from '@fastgpt/global/common/string/tools';

 type Props = ModuleDispatchProps<{
   [NodeInputKeyEnum.condition]: IfElseConditionType;