User select node (#2397)

* feat: add user select node (#2300)

* feat: add user select node

* fix

* type

* fix

* fix

* fix

* perf: user select code

* perf: user select histories

* perf: i18n

---------

Co-authored-by: heheer <heheer@sealos.io>
Author: Archer
Date: 2024-08-15 12:27:04 +08:00 (committed by GitHub)
Parent: f8b8fcc172
Commit: fdeb1590d7
51 changed files with 1060 additions and 184 deletions

View File

@@ -9,6 +9,7 @@ import type {
ChatCompletionUserMessageParam as SdkChatCompletionUserMessageParam
} from 'openai/resources';
import { ChatMessageTypeEnum } from './constants';
import { InteractiveNodeResponseItemType } from '../workflow/template/system/userSelect/type';
export * from 'openai/resources';
@@ -33,6 +34,7 @@ export type ChatCompletionMessageParam = (
| CustomChatCompletionUserMessageParam
) & {
dataId?: string;
interactive?: InteractiveNodeResponseItemType;
};
export type SdkChatCompletionMessageParam = SdkChatCompletionMessageParam;

View File

@@ -124,6 +124,13 @@ export const chats2GPTMessages = ({
role: ChatCompletionRequestMessageRoleEnum.Assistant,
content: value.text.content
});
} else if (value.type === ChatItemValueTypeEnum.interactive) {
results = results.concat({
dataId,
role: ChatCompletionRequestMessageRoleEnum.Assistant,
interactive: value.interactive,
content: ''
});
}
});
}
@@ -254,6 +261,12 @@ export const GPTMessages2Chats = (
]
});
}
} else if (item.interactive) {
value.push({
//@ts-ignore
type: ChatItemValueTypeEnum.interactive,
interactive: item.interactive
});
}
}

View File

@@ -24,7 +24,8 @@ export enum ChatFileTypeEnum {
export enum ChatItemValueTypeEnum {
text = 'text',
file = 'file',
tool = 'tool'
tool = 'tool',
interactive = 'interactive'
}
export enum ChatSourceEnum {

View File

@@ -15,6 +15,7 @@ import type { AppSchema as AppType } from '@fastgpt/global/core/app/type.d';
import { DatasetSearchModeEnum } from '../dataset/constants';
import { DispatchNodeResponseType } from '../workflow/runtime/type.d';
import { ChatBoxInputType } from '../../../../projects/app/src/components/core/chat/ChatContainer/ChatBox/type';
import { InteractiveNodeResponseItemType } from '../workflow/template/system/userSelect/type';
export type ChatSchema = {
_id: string;
@@ -67,11 +68,12 @@ export type SystemChatItemType = {
value: SystemChatItemValueItemType[];
};
export type AIChatItemValueItemType = {
type: ChatItemValueTypeEnum.text | ChatItemValueTypeEnum.tool;
type: ChatItemValueTypeEnum.text | ChatItemValueTypeEnum.tool | ChatItemValueTypeEnum.interactive;
text?: {
content: string;
};
tools?: ToolModuleResponseItemType[];
interactive?: InteractiveNodeResponseItemType;
};
export type AIChatItemType = {
obj: ChatRoleEnum.AI;
@@ -153,6 +155,13 @@ export type ChatHistoryItemResType = DispatchNodeResponseType & {
moduleName: string;
};
/* ---------- node outputs ------------ */
export type NodeOutputItemType = {
nodeId: string;
key: NodeOutputKeyEnum;
value: any;
};
/* One tool run response */
export type ToolRunResponseItemType = any;
/* tool module response */

View File

@@ -3,6 +3,7 @@ export enum FlowNodeTemplateTypeEnum {
ai = 'ai',
function = 'function',
tools = 'tools',
interactive = 'interactive',
search = 'search',
multimodal = 'multimodal',
@@ -123,7 +124,9 @@ export enum NodeInputKeyEnum {
codeType = 'codeType', // js|py
// read files
fileUrlList = 'fileUrlList'
fileUrlList = 'fileUrlList',
// user select
userSelectOptions = 'userSelectOptions'
}
export enum NodeOutputKeyEnum {
@@ -162,7 +165,11 @@ export enum NodeOutputKeyEnum {
// plugin
pluginStart = 'pluginStart',
ifElseResult = 'ifElseResult'
// if else
ifElseResult = 'ifElseResult',
//user select
selectResult = 'selectResult'
}
export enum VariableInputEnum {

View File

@@ -118,7 +118,8 @@ export enum FlowNodeTypeEnum {
code = 'code',
textEditor = 'textEditor',
customFeedback = 'customFeedback',
readFiles = 'readFiles'
readFiles = 'readFiles',
userSelect = 'userSelect'
}
// node IO value type

View File

@@ -10,7 +10,9 @@ export enum SseResponseEventEnum {
toolParams = 'toolParams', // tool params return
toolResponse = 'toolResponse', // tool response return
flowResponses = 'flowResponses', // sse response request
updateVariables = 'updateVariables'
updateVariables = 'updateVariables',
interactive = 'interactive' // user select
}
export enum DispatchNodeResponseKeyEnum {
@@ -19,7 +21,9 @@ export enum DispatchNodeResponseKeyEnum {
nodeDispatchUsages = 'nodeDispatchUsages', // the node bill.
childrenResponses = 'childrenResponses', // Some nodes make recursive calls that need to be returned
toolResponses = 'toolResponses', // The result is passed back to the tool node for use
assistantResponses = 'assistantResponses' // assistant response
assistantResponses = 'assistantResponses', // assistant response
interactive = 'INTERACTIVE' // is interactive
}
export const needReplaceReferenceInputTypeList = [

View File

@@ -3,7 +3,8 @@ import {
ChatItemType,
UserChatItemValueItemType,
ChatItemValueItemType,
ToolRunResponseItemType
ToolRunResponseItemType,
NodeOutputItemType
} from '../../chat/type';
import { FlowNodeInputItemType, FlowNodeOutputItemType } from '../type/io.d';
import { StoreNodeItemType } from '../type/node';
@@ -17,6 +18,7 @@ import { AppDetailType, AppSchema } from '../../app/type';
import { RuntimeNodeItemType } from '../runtime/type';
import { RuntimeEdgeItemType } from './edge';
import { ReadFileNodeResponse } from '../template/system/readFiles/type';
import { UserSelectOptionType } from '../template/system/userSelect/type';
/* workflow props */
export type ChatDispatchProps = {
@@ -153,6 +155,9 @@ export type DispatchNodeResponseType = {
// read files
readFilesResult?: string;
readFiles?: ReadFileNodeResponse;
// user select
userSelectResult?: string;
};
export type DispatchNodeResultType<T> = {

View File

@@ -6,7 +6,9 @@ import { StoreEdgeItemType } from '../type/edge';
import { RuntimeEdgeItemType, RuntimeNodeItemType } from './type';
import { VARIABLE_NODE_ID } from '../constants';
import { isReferenceValue } from '../utils';
import { ReferenceValueProps } from '../type/io';
import { FlowNodeOutputItemType, ReferenceValueProps } from '../type/io';
import { ChatItemType, NodeOutputItemType } from '../../../core/chat/type';
import { ChatItemValueTypeEnum, ChatRoleEnum } from '../../../core/chat/constants';
export const getMaxHistoryLimitFromNodes = (nodes: StoreNodeItemType[]): number => {
let limit = 10;
@@ -25,7 +27,35 @@ export const getMaxHistoryLimitFromNodes = (nodes: StoreNodeItemType[]): number
return limit * 2;
};
export const initWorkflowEdgeStatus = (edges: StoreEdgeItemType[]): RuntimeEdgeItemType[] => {
export const getLastInteractiveValue = (histories: ChatItemType[]) => {
const lastAIMessage = histories.findLast((item) => item.obj === ChatRoleEnum.AI);
if (lastAIMessage) {
const interactiveValue = lastAIMessage.value.find(
(v) => v.type === ChatItemValueTypeEnum.interactive
);
if (interactiveValue && 'interactive' in interactiveValue) {
return interactiveValue.interactive;
}
}
return null;
};
export const initWorkflowEdgeStatus = (
edges: StoreEdgeItemType[],
histories?: ChatItemType[]
): RuntimeEdgeItemType[] => {
// If there is a history, use the last interactive value
if (!!histories) {
const memoryEdges = getLastInteractiveValue(histories)?.memoryEdges;
if (memoryEdges && memoryEdges.length > 0) {
return memoryEdges;
}
}
return (
edges?.map((edge) => ({
...edge,
@@ -34,7 +64,19 @@ export const initWorkflowEdgeStatus = (edges: StoreEdgeItemType[]): RuntimeEdgeI
);
};
export const getDefaultEntryNodeIds = (nodes: (StoreNodeItemType | RuntimeNodeItemType)[]) => {
export const getWorkflowEntryNodeIds = (
nodes: (StoreNodeItemType | RuntimeNodeItemType)[],
histories?: ChatItemType[]
) => {
// If there is a history, use the last interactive entry node
if (!!histories) {
const entryNodeIds = getLastInteractiveValue(histories)?.entryNodeIds;
if (Array.isArray(entryNodeIds) && entryNodeIds.length > 0) {
return entryNodeIds;
}
}
const entryList = [
FlowNodeTypeEnum.systemConfig,
FlowNodeTypeEnum.workflowStart,
@@ -212,3 +254,29 @@ export const textAdaptGptResponse = ({
]
});
};
/* Update runtimeNode's outputs with interactive data from history */
export function rewriteNodeOutputByHistories(
histories: ChatItemType[],
runtimeNodes: RuntimeNodeItemType[]
) {
const interactive = getLastInteractiveValue(histories);
if (!interactive?.nodeOutputs) {
return runtimeNodes;
}
return runtimeNodes.map((node) => {
return {
...node,
outputs: node.outputs.map((output: FlowNodeOutputItemType) => {
return {
...output,
value:
interactive?.nodeOutputs?.find(
(item: NodeOutputItemType) => item.nodeId === node.nodeId && item.key === output.key
)?.value || output?.value
};
})
};
});
}
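
Note: taken together, these helpers let a workflow resume from an interactive pause. A rough composition sketch, based on how the chat completion handlers later in this commit use them (`nodes`, `edges` and `histories` stand for the stored workflow config and the concatenated chat history):

// Resume from the last interactive checkpoint if one exists, otherwise fall back to the normal entry nodes
let runtimeNodes = storeNodes2RuntimeNodes(nodes, getWorkflowEntryNodeIds(nodes, histories));
// Restore the node outputs cached when the flow paused
runtimeNodes = rewriteNodeOutputByHistories(histories, runtimeNodes);
// Restore the edge statuses snapshotted in memoryEdges
const runtimeEdges = initWorkflowEdgeStatus(edges, histories);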

View File

@@ -26,6 +26,7 @@ import { CodeNode } from './system/sandbox';
import { TextEditorNode } from './system/textEditor';
import { CustomFeedbackNode } from './system/customFeedback';
import { ReadFilesNodes } from './system/readFiles';
import { UserSelectNode } from './system/userSelect/index';
const systemNodes: FlowNodeTemplateType[] = [
AiChatModule,
@@ -51,7 +52,8 @@ export const appSystemModuleTemplates: FlowNodeTemplateType[] = [
SystemConfigNode,
WorkflowStart,
...systemNodes,
CustomFeedbackNode
CustomFeedbackNode,
UserSelectNode
];
/* plugin flow module templates */
export const pluginSystemModuleTemplates: FlowNodeTemplateType[] = [

View File

@@ -0,0 +1,62 @@
import { i18nT } from '../../../../../../web/i18n/utils';
import {
FlowNodeTemplateTypeEnum,
NodeInputKeyEnum,
NodeOutputKeyEnum,
WorkflowIOValueTypeEnum
} from '../../../constants';
import {
FlowNodeInputTypeEnum,
FlowNodeOutputTypeEnum,
FlowNodeTypeEnum
} from '../../../node/constant';
import { FlowNodeTemplateType } from '../../../type/node.d';
import { getHandleConfig } from '../../utils';
export const UserSelectNode: FlowNodeTemplateType = {
id: FlowNodeTypeEnum.userSelect,
templateType: FlowNodeTemplateTypeEnum.interactive,
flowNodeType: FlowNodeTypeEnum.userSelect,
sourceHandle: getHandleConfig(false, false, false, false),
targetHandle: getHandleConfig(true, false, true, true),
avatar: 'core/workflow/template/userSelect',
diagram: '/imgs/app/userSelect.svg',
name: i18nT('app:workflow.user_select'),
intro: i18nT(`app:workflow.user_select_tip`),
showStatus: true,
version: '489',
inputs: [
{
key: NodeInputKeyEnum.description,
renderTypeList: [FlowNodeInputTypeEnum.textarea],
valueType: WorkflowIOValueTypeEnum.string,
label: i18nT('app:workflow.select_description')
},
{
key: NodeInputKeyEnum.userSelectOptions,
renderTypeList: [FlowNodeInputTypeEnum.custom],
valueType: WorkflowIOValueTypeEnum.any,
label: '',
value: [
{
value: 'Confirm',
key: 'option1'
},
{
value: 'Cancel',
key: 'option2'
}
]
}
],
outputs: [
{
id: NodeOutputKeyEnum.selectResult,
key: NodeOutputKeyEnum.selectResult,
required: true,
label: i18nT('app:workflow.select_result'),
valueType: WorkflowIOValueTypeEnum.string,
type: FlowNodeOutputTypeEnum.static
}
]
};
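
Note: each entry in userSelectOptions carries a stable `key`, and that key doubles as the id of a per-option source handle on the node, which is how different options branch into different workflow paths. Both the editor node and the dispatcher shown later in this commit derive the handle id the same way, roughly:

const optionHandleId = getHandleId(nodeId, 'source', option.key); // one source handle per option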

View File

@@ -0,0 +1,26 @@
import { NodeOutputItemType } from '../../../../chat/type';
import { FlowNodeOutputItemType } from '../../../type/io';
import { RuntimeEdgeItemType } from '../../../runtime/type';
export type UserSelectOptionItemType = {
key: string;
value: string;
};
type InteractiveBasicType = {
entryNodeIds: string[];
memoryEdges: RuntimeEdgeItemType[];
nodeOutputs: NodeOutputItemType[];
};
type UserSelectInteractive = {
type: 'userSelect';
params: {
// description: string;
userSelectOptions: UserSelectOptionItemType[];
userSelectedVal?: string;
};
};
export type InteractiveNodeResponseItemType = InteractiveBasicType & UserSelectInteractive;
export type UserInteractiveType = UserSelectInteractive;
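
For orientation, a populated InteractiveNodeResponseItemType stored on an AI chat item might look roughly like this (illustrative values; the option keys and labels mirror the node template defaults shown earlier):

const example: InteractiveNodeResponseItemType = {
  type: 'userSelect',
  params: {
    userSelectOptions: [
      { key: 'option1', value: 'Confirm' },
      { key: 'option2', value: 'Cancel' }
    ],
    userSelectedVal: 'Confirm' // filled in once the user picks an option
  },
  entryNodeIds: ['<userSelect nodeId>'], // where the workflow resumes
  memoryEdges: [],                       // snapshot of runtime edge statuses (abbreviated)
  nodeOutputs: []                        // cached node outputs (abbreviated)
};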

View File

@@ -66,6 +66,8 @@ export type FlowNodeTemplateType = FlowNodeCommonType & {
// action
forbidDelete?: boolean; // forbid delete
unique?: boolean;
diagram?: string; // diagram url
};
export type NodeTemplateListItemType = {

View File

@@ -1,7 +1,7 @@
import type { ChatItemType, ChatItemValueItemType } from '@fastgpt/global/core/chat/type';
import { MongoChatItem } from './chatItemSchema';
import { addLog } from '../../common/system/log';
import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants';
import { ChatItemValueTypeEnum, ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { delFileByFileIdList, getGFSCollection } from '../../common/file/gridfs/controller';
import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
import { MongoChat } from './chatSchema';
@@ -79,6 +79,52 @@ export const addCustomFeedbacks = async ({
}
};
/*
Update the user selected index of the interactive module
*/
export const updateUserSelectedResult = async ({
appId,
chatId,
userSelectedVal
}: {
appId: string;
chatId?: string;
userSelectedVal: string;
}) => {
if (!chatId) return;
try {
const chatItem = await MongoChatItem.findOne(
{ appId, chatId, obj: ChatRoleEnum.AI },
'value'
).sort({ _id: -1 });
if (!chatItem) return;
const interactiveValue = chatItem.value.find(
(v) => v.type === ChatItemValueTypeEnum.interactive
);
if (
!interactiveValue ||
interactiveValue.type !== ChatItemValueTypeEnum.interactive ||
!interactiveValue.interactive?.params
)
return;
interactiveValue.interactive = {
...interactiveValue.interactive,
params: {
...interactiveValue.interactive.params,
userSelectedVal
}
};
await chatItem.save();
} catch (error) {
addLog.error('updateUserSelectedResult error', error);
}
};
/*
Delete chat files
1. ChatId: Delete one chat files

View File

@@ -1,6 +1,9 @@
import { NextApiResponse } from 'next';
import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
import {
DispatchNodeResponseKeyEnum,
SseResponseEventEnum
} from '@fastgpt/global/core/workflow/runtime/constants';
import { NodeOutputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import type {
ChatDispatchProps,
@@ -10,6 +13,7 @@ import type { RuntimeNodeItemType } from '@fastgpt/global/core/workflow/runtime/
import type {
AIChatItemValueItemType,
ChatHistoryItemResType,
NodeOutputItemType,
ToolRunResponseItemType
} from '@fastgpt/global/core/chat/type.d';
import {
@@ -17,7 +21,7 @@ import {
FlowNodeTypeEnum
} from '@fastgpt/global/core/workflow/node/constant';
import { replaceVariable } from '@fastgpt/global/common/string/tools';
import { responseWriteNodeStatus } from '../../../common/response';
import { responseWrite, responseWriteNodeStatus } from '../../../common/response';
import { getSystemTime } from '@fastgpt/global/common/time/timezone';
import { replaceVariableLabel } from '@fastgpt/global/core/workflow/utils';
@@ -37,7 +41,8 @@ import { dispatchPluginOutput } from './plugin/runOutput';
import { removeSystemVariable, valueTypeFormat } from './utils';
import {
filterWorkflowEdges,
checkNodeRunStatus
checkNodeRunStatus,
getLastInteractiveValue
} from '@fastgpt/global/core/workflow/runtime/utils';
import { ChatNodeUsageType } from '@fastgpt/global/support/wallet/bill/type';
import { dispatchRunTools } from './agent/runTool/index';
@@ -56,6 +61,13 @@ import { dispatchRunCode } from './code/run';
import { dispatchTextEditor } from './tools/textEditor';
import { dispatchCustomFeedback } from './tools/customFeedback';
import { dispatchReadFiles } from './tools/readFiles';
import { dispatchUserSelect } from './interactive/userSelect';
import { FlowNodeOutputItemType } from '@fastgpt/global/core/workflow/type/io';
import {
InteractiveNodeResponseItemType,
UserInteractiveType,
UserSelectInteractive
} from '@fastgpt/global/core/workflow/template/system/userSelect/type';
const callbackMap: Record<FlowNodeTypeEnum, Function> = {
[FlowNodeTypeEnum.workflowStart]: dispatchWorkflowStart,
@@ -80,6 +92,7 @@ const callbackMap: Record<FlowNodeTypeEnum, Function> = {
[FlowNodeTypeEnum.textEditor]: dispatchTextEditor,
[FlowNodeTypeEnum.customFeedback]: dispatchCustomFeedback,
[FlowNodeTypeEnum.readFiles]: dispatchReadFiles,
[FlowNodeTypeEnum.userSelect]: dispatchUserSelect,
// none
[FlowNodeTypeEnum.systemConfig]: dispatchSystemConfig,
@@ -171,7 +184,7 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
}
}
}
/* Pass the output of the module to the next stage */
/* Pass the output of the node, to get next nodes and update edge status */
function nodeOutput(
node: RuntimeNodeItemType,
result: Record<string, any> = {}
@@ -211,9 +224,58 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
return nextStepNodes;
}
function checkNodeCanRun(nodes: RuntimeNodeItemType[] = []): Promise<any> {
return Promise.all(
nodes.map(async (node) => {
/* Have interactive result, computed edges and node outputs */
function handleInteractiveResult({
entryNodeIds,
interactiveResponse
}: {
entryNodeIds: string[];
interactiveResponse: UserSelectInteractive;
}): AIChatItemValueItemType {
// Get node outputs
const nodeOutputs: NodeOutputItemType[] = [];
runtimeNodes.forEach((node) => {
node.outputs.forEach((output) => {
if (output.value) {
nodeOutputs.push({
nodeId: node.nodeId,
key: output.key as NodeOutputKeyEnum,
value: output.value
});
}
});
});
const interactiveResult: InteractiveNodeResponseItemType = {
...interactiveResponse,
entryNodeIds,
memoryEdges: runtimeEdges.map((edge) => ({
...edge,
status: entryNodeIds.includes(edge.target)
? 'active'
: entryNodeIds.includes(edge.source)
? 'waiting'
: edge.status
})),
nodeOutputs
};
if (stream && res) {
responseWrite({
res,
event: SseResponseEventEnum.interactive,
data: JSON.stringify({ interactive: interactiveResult })
});
}
return {
type: ChatItemValueTypeEnum.interactive,
interactive: interactiveResult
};
}
async function checkNodeCanRun(node: RuntimeNodeItemType): Promise<any> {
const status = checkNodeRunStatus({
node,
runtimeEdges
@@ -225,6 +287,12 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
await surrenderProcess();
const response:
| {
node: RuntimeNodeItemType;
result: Record<string, any>;
}
| undefined = await (() => {
if (status === 'run') {
addLog.debug(`[dispatchWorkFlow] nodeRunWithActive: ${node.name}`);
return nodeRunWithActive(node);
@@ -233,32 +301,40 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
addLog.debug(`[dispatchWorkFlow] nodeRunWithSkip: ${node.name}`);
return nodeRunWithSkip(node);
}
})();
return;
})
).then((result) => {
const flat = result.flat().filter(Boolean) as unknown as {
node: RuntimeNodeItemType;
result: Record<string, any>;
}[];
if (flat.length === 0) return;
if (!response) return;
// Update the node output at the end of the run and get the next nodes
const nextNodes = flat.map((item) => nodeOutput(item.node, item.result)).flat();
const nextNodes = nodeOutput(response.node, response.result);
// Remove repeat nodes(Make sure that the node is only executed once)
const filterNextNodes = nextNodes.filter(
(node, index, self) => self.findIndex((t) => t.nodeId === node.nodeId) === index
);
return checkNodeCanRun(filterNextNodes);
});
// In the current version, only one interactive node is allowed at the same time
const interactiveResponse: UserInteractiveType | undefined =
response.result?.[DispatchNodeResponseKeyEnum.interactive];
if (interactiveResponse) {
chatAssistantResponse.push(
handleInteractiveResult({
entryNodeIds: [response.node.nodeId],
interactiveResponse
})
);
return;
}
return Promise.all(filterNextNodes.map(checkNodeCanRun));
}
// After a run round finishes, clear the incoming edges' status to avoid polluting the next run
function nodeRunFinish(node: RuntimeNodeItemType) {
const edges = runtimeEdges.filter((item) => item.target === node.nodeId);
edges.forEach((item) => {
node.isEntry = false;
runtimeEdges.forEach((item) => {
if (item.target === node.nodeId) {
item.status = 'waiting';
}
});
}
/* Inject data into module input */
@@ -393,12 +469,12 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
// start process with initInput
const entryNodes = runtimeNodes.filter((item) => item.isEntry);
console.log(runtimeEdges);
// reset entry
runtimeNodes.forEach((item) => {
item.isEntry = false;
});
await checkNodeCanRun(entryNodes);
// runtimeNodes.forEach((item) => {
// item.isEntry = false;
// });
await Promise.all(entryNodes.map(checkNodeCanRun));
// focus try to run pluginOutput
const pluginOutputModule = runtimeNodes.find(
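
Note: when an interactive node pauses the flow, handleInteractiveResult records the checkpoint on the assistant message and, in stream mode, emits a single SSE frame. The wire format is roughly as follows (values illustrative, structure per handleInteractiveResult above):

event: interactive
data: {"interactive":{"type":"userSelect","params":{"userSelectOptions":[{"key":"option1","value":"Confirm"},{"key":"option2","value":"Cancel"}]},"entryNodeIds":["<userSelect nodeId>"],"memoryEdges":[],"nodeOutputs":[]}}

The ChatBox generating-message handler further down appends this payload to the last AI message as a ChatItemValueTypeEnum.interactive value, which is what renders the option buttons.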

View File

@@ -0,0 +1,94 @@
import {
DispatchNodeResponseKeyEnum,
SseResponseEventEnum
} from '@fastgpt/global/core/workflow/runtime/constants';
import {
DispatchNodeResultType,
ModuleDispatchProps
} from '@fastgpt/global/core/workflow/runtime/type';
import { NodeInputKeyEnum, NodeOutputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import { getHandleId } from '@fastgpt/global/core/workflow/utils';
import type {
UserSelectInteractive,
UserSelectOptionItemType
} from '@fastgpt/global/core/workflow/template/system/userSelect/type';
import { updateUserSelectedResult } from '../../../chat/controller';
import { textAdaptGptResponse } from '@fastgpt/global/core/workflow/runtime/utils';
import { responseWrite } from '../../../../common/response';
import { chatValue2RuntimePrompt } from '@fastgpt/global/core/chat/adapt';
type Props = ModuleDispatchProps<{
[NodeInputKeyEnum.description]: string;
[NodeInputKeyEnum.userSelectOptions]: UserSelectOptionItemType[];
}>;
type UserSelectResponse = DispatchNodeResultType<{
[NodeOutputKeyEnum.answerText]?: string;
[DispatchNodeResponseKeyEnum.interactive]?: UserSelectInteractive;
[NodeOutputKeyEnum.selectResult]?: string;
}>;
export const dispatchUserSelect = async (props: Props): Promise<UserSelectResponse> => {
const {
res,
detail,
histories,
stream,
app: { _id: appId },
chatId,
node: { nodeId, isEntry },
params: { description, userSelectOptions },
query
} = props;
// Interactive node is not the entry node, return interactive result
if (!isEntry) {
const answerText = description ? `\n${description}` : undefined;
if (res && stream && answerText) {
responseWrite({
res,
event: detail ? SseResponseEventEnum.fastAnswer : undefined,
data: textAdaptGptResponse({
text: answerText
})
});
}
return {
[NodeOutputKeyEnum.answerText]: answerText,
[DispatchNodeResponseKeyEnum.interactive]: {
type: 'userSelect',
params: {
userSelectOptions
}
}
};
}
const { text: userSelectedVal } = chatValue2RuntimePrompt(query);
// Error status
if (userSelectedVal === undefined) {
return {
[DispatchNodeResponseKeyEnum.skipHandleId]: userSelectOptions.map((item) =>
getHandleId(nodeId, 'source', item.value)
)
};
}
// Update db
updateUserSelectedResult({
appId,
chatId,
userSelectedVal
});
return {
[DispatchNodeResponseKeyEnum.skipHandleId]: userSelectOptions
.filter((item) => item.value !== userSelectedVal)
.map((item: any) => getHandleId(nodeId, 'source', item.key)),
[DispatchNodeResponseKeyEnum.nodeResponse]: {
userSelectResult: userSelectedVal
},
[NodeOutputKeyEnum.selectResult]: userSelectedVal
};
};
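
Note: dispatchUserSelect is effectively two-phase. On the first pass the node is not the entry, so it streams the optional description and returns DispatchNodeResponseKeyEnum.interactive, which makes dispatchWorkFlow stop and emit the interactive checkpoint. When the chat resumes, the node is re-entered as an entry node, the user's choice is read from query, persisted via updateUserSelectedResult, and the source handles of the unselected options are skipped so only the chosen branch continues. The resumed call's result is roughly (illustrative values):

{
  [DispatchNodeResponseKeyEnum.skipHandleId]: [/* handles of the options that were not selected */],
  [DispatchNodeResponseKeyEnum.nodeResponse]: { userSelectResult: 'Confirm' },
  [NodeOutputKeyEnum.selectResult]: 'Confirm'
}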

View File

@@ -4,7 +4,7 @@ import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
import { getPluginRuntimeById } from '../../../app/plugin/controller';
import {
getDefaultEntryNodeIds,
getWorkflowEntryNodeIds,
initWorkflowEdgeStatus,
storeNodes2RuntimeNodes
} from '@fastgpt/global/core/workflow/runtime/utils';
@@ -49,7 +49,7 @@ export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPlugi
const { flowResponses, flowUsages, assistantResponses } = await dispatchWorkFlow({
...props,
runtimeNodes: storeNodes2RuntimeNodes(plugin.nodes, getDefaultEntryNodeIds(plugin.nodes)).map(
runtimeNodes: storeNodes2RuntimeNodes(plugin.nodes, getWorkflowEntryNodeIds(plugin.nodes)).map(
(node) => {
if (node.flowNodeType === FlowNodeTypeEnum.pluginInput) {
return {

View File

@@ -6,7 +6,7 @@ import { responseWrite } from '../../../../common/response';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { SseResponseEventEnum } from '@fastgpt/global/core/workflow/runtime/constants';
import {
getDefaultEntryNodeIds,
getWorkflowEntryNodeIds,
initWorkflowEdgeStatus,
storeNodes2RuntimeNodes,
textAdaptGptResponse
@@ -67,7 +67,10 @@ export const dispatchAppRequest = async (props: Props): Promise<Response> => {
const { flowResponses, flowUsages, assistantResponses } = await dispatchWorkFlow({
...props,
app: appData,
runtimeNodes: storeNodes2RuntimeNodes(appData.modules, getDefaultEntryNodeIds(appData.modules)),
runtimeNodes: storeNodes2RuntimeNodes(
appData.modules,
getWorkflowEntryNodeIds(appData.modules)
),
runtimeEdges: initWorkflowEdgeStatus(appData.edges),
histories: chatHistories,
query: runtimePrompt2ChatsValue({

View File

@@ -212,6 +212,8 @@ export const iconPaths = {
'core/workflow/template/textConcat': () =>
import('./icons/core/workflow/template/textConcat.svg'),
'core/workflow/template/toolCall': () => import('./icons/core/workflow/template/toolCall.svg'),
'core/workflow/template/userSelect': () =>
import('./icons/core/workflow/template/userSelect.svg'),
'core/workflow/template/variable': () => import('./icons/core/workflow/template/variable.svg'),
'core/workflow/template/variableUpdate': () =>
import('./icons/core/workflow/template/variableUpdate.svg'),

View File

@@ -0,0 +1,10 @@
<svg viewBox="0 0 32 32" fill="none" xmlns="http://www.w3.org/2000/svg">
<rect width="32" height="32" rx="6" fill="url(#paint0_linear_8765_6055)"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M10.6666 7.11108C8.70296 7.11108 7.11108 8.70296 7.11108 10.6666V21.3333C7.11108 23.297 8.70296 24.8889 10.6666 24.8889H21.3333C23.297 24.8889 24.8889 23.297 24.8889 21.3333V10.6666C24.8889 8.70296 23.297 7.11108 21.3333 7.11108H10.6666ZM14.4283 12.586C14.7407 12.2736 14.7407 11.7671 14.4283 11.4547C14.1159 11.1422 13.6094 11.1422 13.2969 11.4547L11.5654 13.1862L10.9314 12.5522C10.619 12.2398 10.1124 12.2398 9.80002 12.5522C9.4876 12.8646 9.4876 13.3712 9.80001 13.6836L10.9973 14.8809C11.1537 15.0372 11.3586 15.1153 11.5636 15.1152C11.7697 15.1163 11.9762 15.0382 12.1335 14.8809L14.4283 12.586ZM22.4342 13.1678C22.4342 13.6096 22.0761 13.9678 21.6342 13.9678H16.8278C16.386 13.9678 16.0278 13.6096 16.0278 13.1678C16.0278 12.7259 16.386 12.3678 16.8278 12.3678L21.6342 12.3678C22.0761 12.3678 22.4342 12.7259 22.4342 13.1678ZM11.8673 21.111C12.7509 21.111 13.4673 20.3946 13.4673 19.511C13.4673 18.6273 12.7509 17.911 11.8673 17.911C10.9836 17.911 10.2673 18.6273 10.2673 19.511C10.2673 20.3946 10.9836 21.111 11.8673 21.111ZM22.4342 19.511C22.4342 19.9528 22.0761 20.311 21.6342 20.311H16.8278C16.386 20.311 16.0278 19.9528 16.0278 19.511C16.0278 19.0691 16.386 18.711 16.8278 18.711H21.6342C22.0761 18.711 22.4342 19.0691 22.4342 19.511Z" fill="white"/>
<defs>
<linearGradient id="paint0_linear_8765_6055" x1="16" y1="0" x2="4.88889" y2="29.3333" gradientUnits="userSpaceOnUse">
<stop stop-color="#3ED9AA"/>
<stop offset="1" stop-color="#13C786"/>
</linearGradient>
</defs>
</svg>


View File

@@ -1 +1,4 @@
<?xml version="1.0" standalone="no"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg t="1688632968712" class="icon" viewBox="0 0 1024 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="3386" xmlns:xlink="http://www.w3.org/1999/xlink" ><path d="M507.904 52.224q95.232 0 179.2 36.352t145.92 98.304 98.304 145.408 36.352 178.688-36.352 179.2-98.304 145.92-145.92 98.304-179.2 36.352-178.688-36.352-145.408-98.304-98.304-145.92-36.352-179.2 36.352-178.688 98.304-145.408 145.408-98.304 178.688-36.352zM736.256 573.44q30.72 0 55.296-15.872t24.576-47.616q0-30.72-24.576-45.568t-55.296-14.848l-452.608 0q-30.72 0-56.32 14.848t-25.6 45.568q0 31.744 25.6 47.616t56.32 15.872l452.608 0z" p-id="3387"></path></svg>
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 17" >
<path d="M5.33342 7.80417C4.99499 7.80417 4.72063 8.07852 4.72063 8.41695C4.72063 8.75538 4.99499 9.02974 5.33342 9.02974H10.6667C11.0052 9.02974 11.2795 8.75538 11.2795 8.41695C11.2795 8.07852 11.0052 7.80417 10.6667 7.80417H5.33342Z" />
<path fill-rule="evenodd" clip-rule="evenodd" d="M14.7901 8.41695C14.7901 12.167 11.7501 15.207 8.00008 15.207C4.25007 15.207 1.21008 12.167 1.21008 8.41695C1.21008 4.66694 4.25007 1.62695 8.00008 1.62695C11.7501 1.62695 14.7901 4.66694 14.7901 8.41695ZM13.4567 8.41695C13.4567 11.4306 11.0137 13.8736 8.00008 13.8736C4.98645 13.8736 2.54342 11.4306 2.54342 8.41695C2.54342 5.40332 4.98645 2.96029 8.00008 2.96029C11.0137 2.96029 13.4567 5.40332 13.4567 8.41695Z"/>
</svg>


View File

@@ -17,6 +17,11 @@ export const workflowNodeTemplateList = [
label: i18nT('common:core.workflow.template.Search'),
list: []
},
{
type: FlowNodeTemplateTypeEnum.interactive,
label: i18nT('common:core.workflow.template.Interactive'),
list: []
},
{
type: FlowNodeTemplateTypeEnum.multimodal,
label: i18nT('common:core.workflow.template.Multimodal'),

View File

@@ -141,14 +141,20 @@
"workflow": {
"Input guide": "Input guide",
"file_url": "Url",
"option1": "Option 1",
"option2": "Option 2",
"read_files": "Documents parse",
"read_files_result": "Document parsing results",
"read_files_result_desc": "The original text of the document consists of the file name and the document content. Multiple files are separated by horizontal lines.",
"read_files_tip": "Parse all uploaded documents in the conversation and return the corresponding document content",
"select_description": "Select description",
"select_result": "Select result",
"template": {
"communication": "Communication"
},
"user_file_input": "Files url",
"user_file_input_desc": "Links to documents and images uploaded by users"
"user_file_input_desc": "Links to documents and images uploaded by users",
"user_select": "User select",
"user_select_tip": "The module can have multiple options that lead to different workflow branches"
}
}

View File

@@ -1,6 +1,5 @@
{
"Delete_all": "Delete all",
"delete_all_input_guide_confirm": "Confirm to delete all input guide lexicons",
"chat_history": "chat record",
"chat_input_guide_lexicon_is_empty": "No vocabulary has been configured yet",
"citations": "{{num}} citations",
@@ -13,25 +12,27 @@
"contextual_preview": "Contextual preview",
"csv_input_lexicon_tip": "Only supports CSV batch import, click to download the template",
"custom_input_guide_url": "Custom thesaurus address",
"delete_all_input_guide_confirm": "Confirm to delete all input guide lexicons",
"empty_directory": "There is nothing left to choose from in this directory~",
"file_amount_over": "Exceed maximum number of files {{max}}",
"in_progress": "in progress",
"input_guide": "Input guide",
"input_guide_lexicon": "Lexicon",
"input_guide_tip": "You can configure some preset questions. When the user enters a question, the relevant question is retrieved from these preset questions for prompt.",
"insert_input_guide,_some_data_already_exists": "Duplicate data, automatically filtered, insert: {{len}} data",
"is_chatting": "Chatting...please wait for the end",
"items": "strip",
"module_runtime_and": "module run time and",
"multiple_AI_conversations": "Multiple AI conversations",
"new_chat": "new conversation",
"new_input_guide_lexicon": "New lexicon",
"no_workflow_response": "No running data",
"plugins_output": "Plugin output",
"question_tip": "From left to right, the response order of each module",
"rearrangement": "Search results rearranged",
"select_file": "Select file",
"select_img": "Select images",
"stream_output": "stream output",
"view_citations": "View citations",
"web_site_sync": "Web site synchronization",
"file_amount_over": "Exceed maximum number of files {{max}}",
"input_guide": "Input guide",
"input_guide_lexicon": "Lexicon",
"input_guide_tip": "You can configure some preset questions. When the user enters a question, the relevant question is retrieved from these preset questions for prompt.",
"insert_input_guide,_some_data_already_exists": "Duplicate data, automatically filtered, insert: {{len}} data",
"new_input_guide_lexicon": "New lexicon",
"select_file": "Select file",
"select_img": "Select images"
"web_site_sync": "Web site synchronization"
}

View File

@@ -527,7 +527,8 @@
"module tokens": "total tokens",
"plugin output": "Plugin output value",
"search using reRank": "Result rearrangement",
"text output": "text output"
"text output": "text output",
"user_select_result": "User select result"
},
"retry": "Regenerate",
"tts": {
@@ -761,6 +762,7 @@
},
"module": {
"Add question type": "Add question type",
"Add_option": "Add option",
"Can not connect self": "Cannot connect to self",
"Confirm Delete Node": "Confirm delete node?",
"Data Type": "Data type",
@@ -771,6 +773,7 @@
"Default Value": "Default value",
"Default value": "Default value",
"Default value placeholder": "If not filled, the default return is an empty string",
"Diagram": "Diagram",
"Edit intro": "Edit description",
"Field Description": "Field description",
"Field Name": "Field name",
@@ -948,7 +951,9 @@
"OnRevert version confirm": "Confirm to revert to this version? It will save the configuration of the version being edited and create a new published version for the reverted version.",
"histories": "Publishing records"
},
"run_test": "Test",
"template": {
"Interactive": "Interactive",
"Multimodal": "Multimodal",
"Search": "Search"
},
@@ -1060,6 +1065,7 @@
"no_data": "No data",
"no_laf_env": "The system is not configured with Laf environment",
"not_yet_introduced": "No introduction yet",
"option": "Option",
"pay": {
"amount": "Amount",
"balance": "Account balance",

View File

@@ -29,6 +29,8 @@
"Custom outputs": "Custom outputs",
"Error": "Error",
"Read file result": "Document parsing result preview",
"User_select_description": "User select description",
"User_select_result": "User select result",
"read files": "parsed document"
},
"template": {

View File

@@ -143,14 +143,20 @@
"workflow": {
"Input guide": "填写说明",
"file_url": "文档链接",
"option1": "选项 1",
"option2": "选项 2",
"read_files": "文档解析",
"read_files_result": "文档解析结果",
"read_files_result_desc": "文档原文,由文件名和文档内容组成,多个文件之间通过横线隔开。",
"read_files_tip": "解析对话中所有上传的文档,并返回对应文档内容",
"select_description": "说明文字",
"select_result": "选择的结果",
"template": {
"communication": "通信"
},
"user_file_input": "文件链接",
"user_file_input_desc": "用户上传的文档和图片链接"
"user_file_input_desc": "用户上传的文档和图片链接",
"user_select": "用户选择",
"user_select_tip": "该模块可配置多个选项,以供对话时选择。不同选项可导向不同工作流支线"
}
}

View File

@@ -1,37 +1,38 @@
{
"Delete_all": "清空词库",
"chat_history": "聊天记录",
"chat_input_guide_lexicon_is_empty": "还没有配置词库",
"citations": "{{num}}条引用",
"click_contextual_preview": "点击查看上下文预览",
"config_input_guide": "配置输入引导",
"config_input_guide_lexicon": "配置词库",
"config_input_guide_lexicon_title": "配置词库",
"content_empty": "内容为空",
"contextual": "{{num}}条上下文",
"contextual_preview": "上下文预览 {{num}} 条",
"csv_input_lexicon_tip": "仅支持 CSV 批量导入,点击下载模板",
"custom_input_guide_url": "自定义词库地址",
"delete_all_input_guide_confirm": "确定要清空输入引导词库吗?",
"empty_directory": "这个目录已经没东西可选了~",
"file_amount_over": "超出最大文件数量 {{max}}",
"in_progress": "进行中",
"input_guide": "输入引导",
"input_guide_lexicon": "词库",
"input_guide_tip": "可以配置一些预设的问题。在用户输入问题时,会从这些预设问题中获取相关问题进行提示。",
"insert_input_guide,_some_data_already_exists": "有重复数据,已自动过滤,共插入 {{len}} 条数据",
"new_input_guide_lexicon": "新词库",
"is_chatting": "正在聊天中...请等待结束",
"content_empty": "内容为空",
"contextual": "{{num}}条上下文",
"contextual_preview": "上下文预览 {{num}} 条",
"items": "条",
"view_citations": "查看引用",
"citations": "{{num}}条引用",
"click_contextual_preview": "点击查看上下文预览",
"multiple_AI_conversations": "多组 AI 对话",
"module_runtime_and": "模块运行时间和",
"empty_directory": "这个目录已经没东西可选了~",
"chat_history": "聊天记录",
"stream_output": "流输出",
"multiple_AI_conversations": "多组 AI 对话",
"new_chat": "新对话",
"new_input_guide_lexicon": "新词库",
"no_workflow_response": "没有运行数据",
"plugins_output": "插件输出",
"in_progress": "进行中",
"question_tip": "从上到下,为各个模块的响应顺序",
"rearrangement": "检索结果重排",
"web_site_sync": "Web站点同步",
"new_chat": "新对话",
"select_file": "选择文件",
"select_img": "选择图片"
"select_img": "选择图片",
"stream_output": "流输出",
"view_citations": "查看引用",
"web_site_sync": "Web站点同步"
}

View File

@@ -537,7 +537,8 @@
"module tokens": "总 tokens",
"plugin output": "插件输出值",
"search using reRank": "结果重排",
"text output": "文本输出"
"text output": "文本输出",
"user_select_result": "用户选择结果"
},
"retry": "重新生成",
"tts": {
@@ -771,6 +772,7 @@
},
"module": {
"Add question type": "添加问题类型",
"Add_option": "添加选项",
"Can not connect self": "不能连接自身",
"Confirm Delete Node": "确认删除该节点?",
"Data Type": "数据类型",
@@ -781,6 +783,7 @@
"Default Value": "默认值",
"Default value": "默认值",
"Default value placeholder": "不填则默认返回空字符",
"Diagram": "示意图",
"Edit intro": "编辑描述",
"Field Description": "字段描述",
"Field Name": "字段名",
@@ -958,7 +961,9 @@
"OnRevert version confirm": "确认回退至该版本?会为您保存编辑中版本的配置,并为回退版本创建一个新的发布版本。",
"histories": "发布记录"
},
"run_test": "运行",
"template": {
"Interactive": "交互",
"Multimodal": "多模态",
"Search": "搜索"
},
@@ -1070,6 +1075,7 @@
"no_data": "暂无数据",
"no_laf_env": "系统未配置Laf环境",
"not_yet_introduced": "暂无介绍",
"option": "选项",
"pay": {
"amount": "金额",
"balance": "账号余额",

View File

@@ -6,7 +6,9 @@
"Reset template confirm": "确认还原代码模板?将会重置所有输入和输出至模板值,请注意保存当前代码。"
},
"confirm_delete_field_tip": "确认删除该字段?",
"create_link_error": "创建链接异常",
"custom_input": "自定义输入",
"delete_api": "确认删除该API密钥删除后该密钥立即失效对应的对话日志不会删除请确认",
"edit_input": "编辑输入",
"field_description": "字段描述",
"field_description_placeholder": "描述该输入字段的功能,如果为工具调用参数,则该描述会影响模型生成的质量",
@@ -27,6 +29,8 @@
"Custom outputs": "自定义输出",
"Error": "错误信息",
"Read file result": "文档解析结果预览",
"User_select_description": "说明文字",
"User_select_result": "选择的结果",
"read files": "解析的文档"
},
"template": {
@@ -40,8 +44,6 @@
"workflow_start": "流程开始"
},
"tool_input": "工具参数",
"variable_picker_tips": "可输入节点名或变量名搜索",
"delete_api": "确认删除该API密钥删除后该密钥立即失效对应的对话日志不会删除请确认",
"create_link_error": "创建链接异常",
"update_link_error": "更新链接异常"
"update_link_error": "更新链接异常",
"variable_picker_tips": "可输入节点名或变量名搜索"
}

View File

@@ -34,6 +34,7 @@ const Button = defineStyleConfig({
transform: 'scale(0.98)'
},
_disabled: {
transform: 'none !important',
_hover: {
filter: 'none'
}

File diff suppressed because one or more lines are too long (new file: 22 KiB image).

View File

@@ -18,7 +18,12 @@ import { useSelectFile } from '@/web/common/file/hooks/useSelectFile';
import { uploadFile2DB } from '@/web/common/file/controller';
import { ChatFileTypeEnum } from '@fastgpt/global/core/chat/constants';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import { ChatBoxInputFormType, ChatBoxInputType, UserInputFileItemType } from '../type';
import {
ChatBoxInputFormType,
ChatBoxInputType,
SendPromptFnType,
UserInputFileItemType
} from '../type';
import { textareaMinH } from '../constants';
import { UseFormReturn, useFieldArray } from 'react-hook-form';
import { ChatBoxContext } from '../Provider';
@@ -51,7 +56,7 @@ const ChatInput = ({
chatForm,
appId
}: {
onSendMessage: (val: ChatBoxInputType & { autoTTSResponse?: boolean }) => void;
onSendMessage: SendPromptFnType;
onStop: () => void;
TextareaDom: React.MutableRefObject<HTMLTextAreaElement | null>;
resetInputVal: (val: ChatBoxInputType) => void;

View File

@@ -15,6 +15,8 @@ import { useCopyData } from '@/web/common/hooks/useCopyData';
import MyIcon from '@fastgpt/web/components/common/Icon';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import { useTranslation } from 'next-i18next';
import { SendPromptFnType } from '../type';
const colorMap = {
[ChatStatusEnum.loading]: {
bg: 'myGray.100',
@@ -30,16 +32,7 @@ const colorMap = {
}
};
const ChatItem = ({
type,
avatar,
statusBoxData,
children,
isLastChild,
questionGuides = [],
...chatControllerProps
}: {
type: ChatRoleEnum.Human | ChatRoleEnum.AI;
type BasicProps = {
avatar?: string;
statusBoxData?: {
status: `${ChatStatusEnum}`;
@@ -47,7 +40,28 @@ const ChatItem = ({
};
questionGuides?: string[];
children?: React.ReactNode;
} & ChatControllerProps) => {
} & ChatControllerProps;
type UserItemType = BasicProps & {
type: ChatRoleEnum.Human;
onSendMessage: undefined;
};
type AiItemType = BasicProps & {
type: ChatRoleEnum.AI;
onSendMessage: SendPromptFnType;
};
type Props = UserItemType | AiItemType;
const ChatItem = ({
type,
avatar,
statusBoxData,
children,
isLastChild,
questionGuides = [],
onSendMessage,
...chatControllerProps
}: Props) => {
const styleMap: BoxProps =
type === ChatRoleEnum.Human
? {
@@ -96,12 +110,13 @@ const ChatItem = ({
isLastChild={isLastChild}
isChatting={isChatting}
questionGuides={questionGuides}
onSendMessage={onSendMessage}
/>
);
})}
</Flex>
);
}, [chat, isChatting, isLastChild, questionGuides, type]);
}, [chat, isChatting, isLastChild, onSendMessage, questionGuides, type]);
const chatStatusMap = useMemo(() => {
if (!statusBoxData?.status) return;

View File

@@ -11,7 +11,6 @@ import React, {
import Script from 'next/script';
import type {
AIChatItemValueItemType,
ChatHistoryItemResType,
ChatSiteItemType,
UserChatItemValueItemType
} from '@fastgpt/global/core/chat/type.d';
@@ -34,7 +33,12 @@ import type { AdminMarkType } from './components/SelectMarkCollection';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import { postQuestionGuide } from '@/web/core/ai/api';
import type { ComponentRef, ChatBoxInputType, ChatBoxInputFormType } from './type.d';
import type {
ComponentRef,
ChatBoxInputType,
ChatBoxInputFormType,
SendPromptFnType
} from './type.d';
import type { StartChatFnProps, generatingMessageProps } from '../type';
import ChatInput from './Input/ChatInput';
import ChatBoxDivider from '../../Divider';
@@ -151,6 +155,16 @@ const ChatBox = (
isChatting
} = useContextSelector(ChatBoxContext, (v) => v);
const isInteractive = useMemo(() => {
const lastAIHistory = chatHistories[chatHistories.length - 1];
if (!lastAIHistory) return false;
const lastAIMessage = lastAIHistory.value as AIChatItemValueItemType[];
const interactiveContent = lastAIMessage?.find(
(item) => item.type === ChatItemValueTypeEnum.interactive
)?.interactive?.params;
return !!interactiveContent;
}, [chatHistories]);
// compute variable input is finish.
const chatForm = useForm<ChatBoxInputFormType>({
defaultValues: {
@@ -201,6 +215,7 @@ const ChatBox = (
status,
name,
tool,
interactive,
autoTTSResponse,
variables
}: generatingMessageProps & { autoTTSResponse?: boolean }) => {
@@ -287,6 +302,16 @@ const ChatBox = (
};
} else if (event === SseResponseEventEnum.updateVariables && variables) {
variablesForm.reset(variables);
} else if (event === SseResponseEventEnum.interactive) {
const val: AIChatItemValueItemType = {
type: ChatItemValueTypeEnum.interactive,
interactive
};
return {
...item,
value: item.value.concat(val)
};
}
return item;
@@ -355,16 +380,8 @@ const ChatBox = (
/**
* user confirm send prompt
*/
const sendPrompt = useCallback(
({
text = '',
files = [],
history = chatHistories,
autoTTSResponse = false
}: ChatBoxInputType & {
autoTTSResponse?: boolean;
history?: ChatSiteItemType[];
}) => {
const sendPrompt: SendPromptFnType = useCallback(
({ text = '', files = [], history = chatHistories, autoTTSResponse = false }) => {
variablesForm.handleSubmit(
async (variables) => {
if (!onStartChat) return;
@@ -898,6 +915,7 @@ const ChatBox = (
onRetry={retryInput(item.dataId)}
onDelete={delOneMessage(item.dataId)}
isLastChild={index === chatHistories.length - 1}
onSendMessage={undefined}
/>
)}
{item.obj === ChatRoleEnum.AI && (
@@ -907,7 +925,8 @@ const ChatBox = (
avatar={appAvatar}
chat={item}
isLastChild={index === chatHistories.length - 1}
{...(item.obj === ChatRoleEnum.AI && {
onSendMessage={sendPrompt}
{...{
showVoiceIcon,
shareId,
outLinkUid,
@@ -923,7 +942,7 @@ const ChatBox = (
onCloseUserLike: onCloseUserLike(item),
onAddUserDislike: onAddUserDislike(item),
onReadUserDislike: onReadUserDislike(item)
})}
}}
>
<ResponseTags
showTags={index !== chatHistories.length - 1 || !isChatting}
@@ -973,7 +992,7 @@ const ChatBox = (
</Box>
</Box>
{/* message input */}
{onStartChat && chatStarted && active && appId && (
{onStartChat && chatStarted && active && appId && !isInteractive && (
<ChatInput
onSendMessage={sendPrompt}
onStop={() => chatController.current?.abort('stop')}

View File

@@ -29,6 +29,16 @@ export type ChatBoxInputType = {
files?: UserInputFileItemType[];
};
export type SendPromptFnType = ({
text,
files,
history,
autoTTSResponse
}: ChatBoxInputType & {
autoTTSResponse?: boolean;
history?: ChatSiteItemType[];
}) => void;
export type ComponentRef = {
restartChat: () => void;
scrollToBottom: (behavior?: 'smooth' | 'auto') => void;

View File

@@ -1,7 +1,7 @@
import { ChatItemValueItemType } from '@fastgpt/global/core/chat/type';
import { ChatItemValueItemType, ChatSiteItemType } from '@fastgpt/global/core/chat/type';
import { ChatBoxInputType, UserInputFileItemType } from './type';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { getFileIcon } from '@fastgpt/global/common/file/icon';
import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants';
export const formatChatValue2InputType = (value?: ChatItemValueItemType[]): ChatBoxInputType => {
if (!value) {
@@ -37,3 +37,37 @@ export const formatChatValue2InputType = (value?: ChatItemValueItemType[]): Chat
files
};
};
export const setUserSelectResultToHistories = (
histories: ChatSiteItemType[],
selectVal: string
): ChatSiteItemType[] => {
if (histories.length === 0) return histories;
// @ts-ignore
return histories.map((item, i) => {
if (i !== histories.length - 1) return item;
item.value;
const value = item.value.map((val) => {
if (val.type !== ChatItemValueTypeEnum.interactive || !val.interactive) return val;
return {
...val,
interactive: {
...val.interactive,
params: {
...val.interactive.params,
userSelectedVal: val.interactive.params.userSelectOptions.find(
(item) => item.value === selectVal
)?.value
}
}
};
});
return {
...item,
value
};
});
};

View File

@@ -1,6 +1,7 @@
import { StreamResponseType } from '@/web/common/api/fetch';
import { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type';
import { ChatSiteItemType, ToolModuleResponseItemType } from '@fastgpt/global/core/chat/type';
import { InteractiveNodeResponseItemType } from '@fastgpt/global/core/workflow/template/system/userSelect/type';
export type generatingMessageProps = {
event: SseResponseEventEnum;
@@ -8,6 +9,7 @@ export type generatingMessageProps = {
name?: string;
status?: 'running' | 'finish';
tool?: ToolModuleResponseItemType;
interactive?: InteractiveNodeResponseItemType;
variables?: Record<string, any>;
};

View File

@@ -6,7 +6,9 @@ import {
AccordionIcon,
AccordionItem,
AccordionPanel,
Box
Box,
Button,
Flex
} from '@chakra-ui/react';
import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants';
import {
@@ -17,6 +19,10 @@ import {
import React from 'react';
import MyIcon from '@fastgpt/web/components/common/Icon';
import Avatar from '@fastgpt/web/components/common/Avatar';
import { SendPromptFnType } from '../ChatContainer/ChatBox/type';
import { useContextSelector } from 'use-context-selector';
import { ChatBoxContext } from '../ChatContainer/ChatBox/Provider';
import { setUserSelectResultToHistories } from '../ChatContainer/ChatBox/utils';
type props = {
value: UserChatItemValueItemType | AIChatItemValueItemType;
@@ -25,10 +31,21 @@ type props = {
isLastChild: boolean;
isChatting: boolean;
questionGuides: string[];
onSendMessage?: SendPromptFnType;
};
const AIResponseBox = ({ value, index, chat, isLastChild, isChatting, questionGuides }: props) => {
if (value.text) {
const AIResponseBox = ({
value,
index,
chat,
isLastChild,
isChatting,
questionGuides,
onSendMessage
}: props) => {
const chatHistories = useContextSelector(ChatBoxContext, (v) => v.chatHistories);
if (value.type === ChatItemValueTypeEnum.text && value.text) {
let source = (value.text?.content || '').trim();
// First empty line
@@ -126,6 +143,45 @@ ${toolResponse}`}
</Box>
);
}
if (
value.type === ChatItemValueTypeEnum.interactive &&
value.interactive &&
value.interactive.type === 'userSelect'
) {
return (
<Flex flexDirection={'column'} gap={2} minW={'200px'} maxW={'250px'}>
{value.interactive.params.userSelectOptions?.map((option) => {
const selected = option.value === value.interactive?.params?.userSelectedVal;
return (
<Button
key={option.key}
variant={'whitePrimary'}
isDisabled={!isLastChild && value.interactive?.params?.userSelectedVal !== undefined}
{...(selected
? {
_disabled: {
cursor: 'default',
borderColor: 'primary.300',
bg: 'primary.50 !important',
color: 'primary.600'
}
}
: {})}
onClick={() => {
onSendMessage?.({
text: option.value,
history: setUserSelectResultToHistories(chatHistories, option.value)
});
}}
>
{option.value}
</Button>
);
})}
</Flex>
);
}
return null;
};

View File

@@ -17,6 +17,7 @@ import { useContextSelector } from 'use-context-selector';
import { ChatBoxContext } from '../ChatContainer/ChatBox/Provider';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import { getFileIcon } from '@fastgpt/global/common/file/icon';
import EmptyTip from '@fastgpt/web/components/common/EmptyTip';
type sideTabItemType = {
moduleLogo?: string;
@@ -124,7 +125,11 @@ const WholeResponseModal = ({
</Flex>
}
>
{response?.length && <ResponseBox response={response} showDetail={showDetail} />}
{!!response?.length ? (
<ResponseBox response={response} showDetail={showDetail} />
) : (
<EmptyTip text={t('chat:no_workflow_response')} />
)}
</MyModal>
);
};
@@ -480,6 +485,12 @@ export const WholeResponseContent = ({
value={activeModule?.readFilesResult}
/>
</>
{/* user select */}
<Row
label={t('common:core.chat.response.user_select_result')}
value={activeModule?.userSelectResult}
/>
</Box>
)}
</>

View File

@@ -9,8 +9,7 @@ import { authApp } from '@fastgpt/service/support/permission/app/auth';
import { dispatchWorkFlow } from '@fastgpt/service/core/workflow/dispatch';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { getUserChatInfoAndAuthTeamPoints } from '@/service/support/permission/auth/team';
import { RuntimeEdgeItemType } from '@fastgpt/global/core/workflow/type/edge';
import { RuntimeNodeItemType } from '@fastgpt/global/core/workflow/runtime/type';
import { StoreEdgeItemType } from '@fastgpt/global/core/workflow/type/edge';
import { removeEmptyUserInput } from '@fastgpt/global/core/chat/utils';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
@@ -22,11 +21,18 @@ import { NextAPI } from '@/service/middleware/entry';
import { GPTMessages2Chats } from '@fastgpt/global/core/chat/adapt';
import { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type';
import { AppChatConfigType } from '@fastgpt/global/core/app/type';
import {
getWorkflowEntryNodeIds,
initWorkflowEdgeStatus,
rewriteNodeOutputByHistories,
storeNodes2RuntimeNodes
} from '@fastgpt/global/core/workflow/runtime/utils';
import { StoreNodeItemType } from '@fastgpt/global/core/workflow/type/node';
export type Props = {
messages: ChatCompletionMessageParam[];
nodes: RuntimeNodeItemType[];
edges: RuntimeEdgeItemType[];
nodes: StoreNodeItemType[];
edges: StoreEdgeItemType[];
variables: Record<string, any>;
appId: string;
appName: string;
@@ -52,8 +58,8 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
chatConfig
} = req.body as Props;
try {
// [histories, user]
const chatMessages = GPTMessages2Chats(messages);
const userInput = chatMessages.pop()?.value as UserChatItemValueItemType[] | undefined;
/* user auth */
@@ -64,6 +70,9 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
authToken: true
})
]);
// auth balance
const { user } = await getUserChatInfoAndAuthTeamPoints(tmbId);
const isPlugin = app.type === AppTypeEnum.plugin;
if (!Array.isArray(nodes)) {
@@ -73,18 +82,19 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
throw new Error('Edges is not array');
}
let runtimeNodes = storeNodes2RuntimeNodes(nodes, getWorkflowEntryNodeIds(nodes, chatMessages));
// Plugin need to replace inputs
if (isPlugin) {
nodes = updatePluginInputByVariables(nodes, variables);
variables = removePluginInputVariables(variables, nodes);
runtimeNodes = updatePluginInputByVariables(runtimeNodes, variables);
variables = removePluginInputVariables(variables, runtimeNodes);
} else {
if (!userInput) {
throw new Error('Params Error');
}
}
// auth balance
const { user } = await getUserChatInfoAndAuthTeamPoints(tmbId);
runtimeNodes = rewriteNodeOutputByHistories(chatMessages, runtimeNodes);
/* start process */
const { flowResponses, flowUsages } = await dispatchWorkFlow({
@@ -95,8 +105,8 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
tmbId,
user,
app,
runtimeNodes: nodes,
runtimeEdges: edges,
runtimeNodes,
runtimeEdges: initWorkflowEdgeStatus(edges, chatMessages),
variables,
query: removeEmptyUserInput(userInput),
chatConfig,

View File

@@ -13,7 +13,7 @@ import { dispatchWorkFlow } from '@fastgpt/service/core/workflow/dispatch';
import type { ChatCompletionCreateParams } from '@fastgpt/global/core/ai/type.d';
import type { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type.d';
import {
getDefaultEntryNodeIds,
getWorkflowEntryNodeIds,
getMaxHistoryLimitFromNodes,
initWorkflowEdgeStatus,
storeNodes2RuntimeNodes,
@@ -64,6 +64,7 @@ import {
getPluginRunContent
} from '@fastgpt/global/core/app/plugin/utils';
import { getSystemTime } from '@fastgpt/global/common/time/timezone';
import { rewriteNodeOutputByHistories } from '@fastgpt/global/core/workflow/runtime/utils';
type FastGptWebChatProps = {
chatId?: string; // undefined: get histories from messages, '': new chat, 'xxxxx': get histories from db
@@ -225,24 +226,22 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
appId: app._id,
chatId,
limit,
field: `dataId obj value`
field: `dataId obj value nodeOutputs`
}),
getAppLatestVersion(app._id, app)
]);
const newHistories = concatHistories(histories, chatMessages);
// Get runtimeNodes
const runtimeNodes = isPlugin
? updatePluginInputByVariables(
storeNodes2RuntimeNodes(nodes, getDefaultEntryNodeIds(nodes)),
variables
)
: storeNodes2RuntimeNodes(nodes, getDefaultEntryNodeIds(nodes));
let runtimeNodes = storeNodes2RuntimeNodes(nodes, getWorkflowEntryNodeIds(nodes, newHistories));
const runtimeVariables = removePluginInputVariables(
variables,
storeNodes2RuntimeNodes(nodes, getDefaultEntryNodeIds(nodes))
);
if (isPlugin) {
// Rewrite plugin run params variables
variables = removePluginInputVariables(variables, runtimeNodes);
runtimeNodes = updatePluginInputByVariables(runtimeNodes, variables);
}
runtimeNodes = rewriteNodeOutputByHistories(newHistories, runtimeNodes);
/* start flow controller */
const { flowResponses, flowUsages, assistantResponses, newVariables } = await (async () => {
@@ -258,8 +257,8 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
chatId,
responseChatItemId,
runtimeNodes,
runtimeEdges: initWorkflowEdgeStatus(edges),
variables: runtimeVariables,
runtimeEdges: initWorkflowEdgeStatus(edges, newHistories),
variables,
query: removeEmptyUserInput(userQuestion.value),
chatConfig,
histories: newHistories,

View File

@@ -145,7 +145,7 @@ const Header = () => {
}
}}
>
{t('common:core.workflow.Debug')}
{t('common:core.workflow.run_test')}
</Button>
{!historiesDefaultData && (

View File

@@ -146,7 +146,7 @@ const Header = () => {
}
}}
>
{t('common:core.workflow.Debug')}
{t('common:core.workflow.run_test')}
</Button>
{!historiesDefaultData && (

View File

@@ -56,7 +56,8 @@ const nodeTypes: Record<FlowNodeTypeEnum, any> = {
[FlowNodeTypeEnum.lafModule]: dynamic(() => import('./nodes/NodeLaf')),
[FlowNodeTypeEnum.ifElseNode]: dynamic(() => import('./nodes/NodeIfElse')),
[FlowNodeTypeEnum.variableUpdate]: dynamic(() => import('./nodes/NodeVariableUpdate')),
[FlowNodeTypeEnum.code]: dynamic(() => import('./nodes/NodeCode'))
[FlowNodeTypeEnum.code]: dynamic(() => import('./nodes/NodeCode')),
[FlowNodeTypeEnum.userSelect]: dynamic(() => import('./nodes/NodeUserSelect'))
};
const edgeTypes = {
[EDGE_TYPE]: ButtonEdge

View File

@@ -0,0 +1,144 @@
import React, { useMemo } from 'react';
import { NodeProps, Position } from 'reactflow';
import { Box, Button, HStack, Input } from '@chakra-ui/react';
import NodeCard from './render/NodeCard';
import { FlowNodeItemType } from '@fastgpt/global/core/workflow/type/node.d';
import Container from '../components/Container';
import RenderInput from './render/RenderInput';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import { useTranslation } from 'next-i18next';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import { FlowNodeInputItemType } from '@fastgpt/global/core/workflow/type/io.d';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { SourceHandle } from './render/Handle';
import { getHandleId } from '@fastgpt/global/core/workflow/utils';
import { useContextSelector } from 'use-context-selector';
import { WorkflowContext } from '../../context';
import { UserSelectOptionItemType } from '@fastgpt/global/core/workflow/template/system/userSelect/type';
import IOTitle from '../components/IOTitle';
import RenderOutput from './render/RenderOutput';
const NodeUserSelect = ({ data, selected }: NodeProps<FlowNodeItemType>) => {
const { t } = useTranslation();
const { nodeId, inputs, outputs } = data;
const onChangeNode = useContextSelector(WorkflowContext, (v) => v.onChangeNode);
const CustomComponent = useMemo(
() => ({
[NodeInputKeyEnum.userSelectOptions]: ({
key: optionKey,
value = [],
...props
}: FlowNodeInputItemType) => {
const options = value as UserSelectOptionItemType[];
return (
<Box>
{options.map((item, i) => (
<Box key={item.key} mb={4}>
<HStack spacing={1}>
<MyTooltip label={t('common:common.Delete')}>
<MyIcon
mt={0.5}
name={'minus'}
w={'0.8rem'}
cursor={'pointer'}
color={'myGray.600'}
_hover={{ color: 'red.600' }}
onClick={() => {
onChangeNode({
nodeId,
type: 'updateInput',
key: optionKey,
value: {
...props,
key: optionKey,
value: options.filter((input) => input.key !== item.key)
}
});
onChangeNode({
nodeId,
type: 'delOutput',
key: item.key
});
}}
/>
</MyTooltip>
<Box color={'myGray.600'} fontWeight={'medium'} fontSize={'sm'}>
{t('common:option') + (i + 1)}
</Box>
</HStack>
<Box position={'relative'}>
<Input
mt={1}
defaultValue={item.value}
bg={'white'}
fontSize={'sm'}
onChange={(e) => {
const newVal = options.map((val) =>
val.key === item.key
? {
...val,
value: e.target.value
}
: val
);
onChangeNode({
nodeId,
type: 'updateInput',
key: optionKey,
value: {
...props,
key: optionKey,
value: newVal
}
});
}}
/>
<SourceHandle
nodeId={nodeId}
handleId={getHandleId(nodeId, 'source', item.key)}
position={Position.Right}
translate={[26, 0]}
/>
</Box>
</Box>
))}
<Button
fontSize={'sm'}
leftIcon={<MyIcon name={'common/addLight'} w={4} />}
onClick={() => {
onChangeNode({
nodeId,
type: 'updateInput',
key: optionKey,
value: {
...props,
key: optionKey,
value: options.concat({ value: '', key: getNanoid() })
}
});
}}
>
{t('common:core.module.Add_option')}
</Button>
</Box>
);
}
}),
[nodeId, onChangeNode, t]
);
return (
<NodeCard minW={'400px'} selected={selected} {...data}>
<Container>
<RenderInput nodeId={nodeId} flowInputList={inputs} CustomComponent={CustomComponent} />
</Container>
<Container>
<IOTitle text={t('common:common.Output')} />
<RenderOutput nodeId={nodeId} flowOutputList={outputs} />
</Container>
</NodeCard>
);
};
export default React.memo(NodeUserSelect);
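
Each option edited above is a plain { key, value } pair: the key comes from getNanoid() and also identifies the option's source handle and dynamic output, which is why deleting an option fires both an updateInput and a delOutput. A condensed sketch of the data involved, with a simplified type and made-up keys for illustration:

// Simplified stand-in for UserSelectOptionItemType as used by NodeUserSelect.
type UserSelectOptionItem = {
  key: string; // nanoid; also used for the option's source handle / output
  value: string; // the label shown to the end user
};

// The userSelectOptions input carries the whole list as its value.
const userSelectOptionsValue: UserSelectOptionItem[] = [
  { key: 'opt-a1b2c3', value: 'Approve' }, // hypothetical keys
  { key: 'opt-d4e5f6', value: 'Reject' }
];

// Adding an option mirrors what the Add button above does.
const withNewOption = userSelectOptionsValue.concat({ key: 'opt-new123', value: '' });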

View File

@@ -1,5 +1,5 @@
import React, { useCallback, useEffect, useMemo, useState } from 'react';
import { Box, Button, Card, Flex } from '@chakra-ui/react';
import { Box, Button, Card, Flex, Image } from '@chakra-ui/react';
import MyIcon from '@fastgpt/web/components/common/Icon';
import Avatar from '@fastgpt/web/components/common/Avatar';
import type { FlowNodeItemType } from '@fastgpt/global/core/workflow/type/node.d';
@@ -42,7 +42,6 @@ type Props = FlowNodeItemType & {
const NodeCard = (props: Props) => {
const { t } = useTranslation();
const { appT } = useI18n();
const { toast } = useToast();
@@ -70,7 +69,7 @@ const NodeCard = (props: Props) => {
// custom title edit
const { onOpenModal: onOpenCustomTitleModal, EditModal: EditTitleModal } = useEditTitle({
title: t('common:common.Custom Title'),
placeholder: appT('module.Custom Title Tip') || ''
placeholder: t('app:module.Custom Title Tip') || ''
});
const showToolHandle = useMemo(
@@ -166,7 +165,7 @@ const NodeCard = (props: Props) => {
onSuccess: (e) => {
if (!e) {
return toast({
title: appT('modules.Title is required'),
title: t('app:modules.Title is required'),
status: 'warning'
});
}
@@ -183,7 +182,7 @@ const NodeCard = (props: Props) => {
)}
<Box flex={1} />
{hasNewVersion && (
<MyTooltip label={appT('app.modules.click to update')}>
<MyTooltip label={t('app:app.modules.click to update')}>
<Button
bg={'yellow.50'}
color={'yellow.600'}
@@ -197,11 +196,29 @@ const NodeCard = (props: Props) => {
_hover={{ bg: 'yellow.100' }}
onClick={onOpenConfirmSync(onClickSyncVersion)}
>
<Box>{appT('app.modules.has new version')}</Box>
<Box>{t('app:app.modules.has new version')}</Box>
<QuestionOutlineIcon ml={1} />
</Button>
</MyTooltip>
)}
{!!nodeTemplate?.diagram && (
<MyTooltip
label={
<Image src={nodeTemplate?.diagram} w={'100%'} minH={['auto', '200px']} alt={''} />
}
>
<Box
fontSize={'sm'}
color={'primary.700'}
p={1}
rounded={'sm'}
cursor={'default'}
_hover={{ bg: 'rgba(17, 24, 36, 0.05)' }}
>
{t('common:core.module.Diagram')}
</Box>
</MyTooltip>
)}
</Flex>
<MenuRender nodeId={nodeId} menuForbid={menuForbid} />
<NodeIntro nodeId={nodeId} intro={intro} />
@@ -217,9 +234,9 @@ const NodeCard = (props: Props) => {
name,
menuForbid,
hasNewVersion,
appT,
onOpenConfirmSync,
onClickSyncVersion,
nodeTemplate?.diagram,
intro,
ConfirmSyncModal,
onOpenCustomTitleModal,

View File

@@ -621,7 +621,6 @@ const WorkflowContextProvider = ({
},
appId
});
// console.log({ finishedEdges, finishedNodes, nextStepRunNodes, flowResponses });
// 5. Store debug result
const newStoreDebugData = {
runtimeNodes: finishedNodes,

View File

@@ -2,13 +2,7 @@ import { useUserStore } from '@/web/support/user/useUserStore';
import React from 'react';
import type { StartChatFnProps } from '@/components/core/chat/ChatContainer/type';
import { streamFetch } from '@/web/common/api/fetch';
import { checkChatSupportSelectFileByModules } from '@/web/core/chat/utils';
import {
getDefaultEntryNodeIds,
getMaxHistoryLimitFromNodes,
initWorkflowEdgeStatus,
storeNodes2RuntimeNodes
} from '@fastgpt/global/core/workflow/runtime/utils';
import { getMaxHistoryLimitFromNodes } from '@fastgpt/global/core/workflow/runtime/utils';
import { useMemoizedFn } from 'ahooks';
import { useContextSelector } from 'use-context-selector';
import { AppContext } from './context';
@@ -47,8 +41,8 @@ export const useChatTest = ({
data: {
// Send histories and user messages
messages: messages.slice(-historyMaxLen - 2),
nodes: storeNodes2RuntimeNodes(nodes, getDefaultEntryNodeIds(nodes)),
edges: initWorkflowEdgeStatus(edges),
nodes,
edges,
variables,
appId: appDetail._id,
appName: `Debug-${appDetail.name}`,
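
With the client-side conversion removed, the test-chat request ships the stored graph as-is; the completions handler earlier in this commit now owns the store-to-runtime conversion, since it needs the loaded histories to decide whether to resume at an interactive node. A minimal sketch of the resulting request body, as a simplification rather than the full contract:

// Illustrative only: the chat-test payload now carries store-format nodes and
// edges untouched; runtime conversion happens server-side.
type StoreNode = { nodeId: string; flowNodeType: string; inputs: unknown[]; outputs: unknown[] };
type StoreEdge = { source: string; target: string; sourceHandle: string; targetHandle: string };

function buildChatTestBody(params: {
  messages: unknown[];
  nodes: StoreNode[];
  edges: StoreEdge[];
  historyMaxLen: number;
  appId: string;
  variables: Record<string, unknown>;
}) {
  const { messages, nodes, edges, historyMaxLen, appId, variables } = params;
  return {
    messages: messages.slice(-historyMaxLen - 2),
    nodes, // previously storeNodes2RuntimeNodes(nodes, getDefaultEntryNodeIds(nodes))
    edges, // previously initWorkflowEdgeStatus(edges)
    variables,
    appId
  };
}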

View File

@@ -6,7 +6,7 @@ import { getNanoid } from '@fastgpt/global/common/string/tools';
import { delay } from '@fastgpt/global/common/system/utils';
import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants';
import {
getDefaultEntryNodeIds,
getWorkflowEntryNodeIds,
initWorkflowEdgeStatus,
storeNodes2RuntimeNodes
} from '@fastgpt/global/core/workflow/runtime/utils';
@@ -38,7 +38,7 @@ export const getScheduleTriggerApp = async () => {
teamId: String(app.teamId),
tmbId: String(app.tmbId),
app,
runtimeNodes: storeNodes2RuntimeNodes(app.modules, getDefaultEntryNodeIds(app.modules)),
runtimeNodes: storeNodes2RuntimeNodes(app.modules, getWorkflowEntryNodeIds(app.modules)),
runtimeEdges: initWorkflowEdgeStatus(app.edges),
variables: {},
query: [

View File

@@ -201,6 +201,11 @@ export const streamFetch = ({
event,
variables: parseJson
});
} else if (event === SseResponseEventEnum.interactive) {
responseQueue.push({
event,
...parseJson
});
} else if (event === SseResponseEventEnum.error) {
if (parseJson.statusText === TeamErrEnum.aiPointsNotEnough) {
useSystemStore.getState().setIsNotSufficientModal(true);