Update userselect ux (#2610)

* perf: user select ux and api

* perf: http variables replace code

* perf: http variables replace code

* perf: chat box question guide adapt interactive

* remove comment
Author: Archer
Date: 2024-09-04 11:11:08 +08:00
Committed by: GitHub
Parent: 85a11d08b2
Commit: 64708ea424
21 changed files with 1083 additions and 949 deletions

View File

@@ -71,16 +71,17 @@ curl --location --request POST 'https://{{host}}/api/admin/initv4810' \
 22. Improvement: knowledge base detail page UI.
 23. Improvement: support running without any network configuration.
 24. Improvement: add data-type constraints to some global variables.
-25. Fix: global variable keys could be duplicated.
-26. Fix: tool calls in Prompt mode carried a leading 0: marker when stream=false.
-27. Fix: chat log permission issue where users who are only APP admins could not view chat log details.
-28. Fix: the knowledge base could not be exported when Milvus was selected for deployment.
-29. Fix: creating a copy of an APP did not copy its system configuration.
-30. Fix: the regex for auto-parsing image links in image recognition mode was not strict enough.
-31. Fix: the content-extraction data type did not match the output data type.
-32. Fix: workflow running-time statistics were wrong.
-33. Fix: tool calls could return undefined in stream mode.
-34. Fix: global variables were not persisted through the API.
-35. Fix: with OpenAPI detail=false, tool call results should not be returned, only text (resolves the cow incompatibility).
-36. Fix: knowledge base tags were loaded repeatedly.
-37. Fix: loop-call edge issue in Debug mode.
+25. Improvement: when viewing workflow details, automatically scroll to the top when switching tabs.
+26. Fix: global variable keys could be duplicated.
+27. Fix: tool calls in Prompt mode carried a leading 0: marker when stream=false.
+28. Fix: chat log permission issue where users who are only APP admins could not view chat log details.
+29. Fix: the knowledge base could not be exported when Milvus was selected for deployment.
+30. Fix: creating a copy of an APP did not copy its system configuration.
+31. Fix: the regex for auto-parsing image links in image recognition mode was not strict enough.
+32. Fix: the content-extraction data type did not match the output data type.
+33. Fix: workflow running-time statistics were wrong.
+34. Fix: tool calls could return undefined in stream mode.
+35. Fix: global variables were not persisted through the API.
+36. Fix: with OpenAPI detail=false, tool call results should not be returned, only text (resolves the cow incompatibility).
+37. Fix: knowledge base tags were loaded repeatedly.
+38. Fix: loop-call edge issue in Debug mode.

View File

@@ -118,7 +118,7 @@ export const chats2GPTMessages = ({
           tool_calls
         })
         .concat(toolResponse);
-    } else if (value.text) {
+    } else if (value.text?.content) {
       results.push({
         dataId,
         role: ChatCompletionRequestMessageRoleEnum.Assistant,
@@ -142,7 +142,7 @@
   messages: ChatCompletionMessageParam[],
   reserveTool = true
 ): ChatItemType[] => {
-  return messages
+  const chatMessages = messages
     .map((item) => {
       const value: ChatItemType['value'] = [];
       const obj = GPT2Chat[item.role];
@@ -288,6 +288,22 @@
       } as ChatItemType;
     })
     .filter((item) => item.value.length > 0);
+
+  // Merge data with the same dataId
+  const result = chatMessages.reduce((result: ChatItemType[], currentItem) => {
+    const lastItem = result[result.length - 1];
+    if (lastItem && lastItem.dataId === currentItem.dataId && lastItem.obj === currentItem.obj) {
+      // @ts-ignore
+      lastItem.value = lastItem.value.concat(currentItem.value);
+    } else {
+      result.push(currentItem);
+    }
+    return result;
+  }, []);
+
+  return result;
 };

 export const chatValue2RuntimePrompt = (value: ChatItemValueItemType[]): RuntimeUserPromptType => {
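
The dataId merge above is what stitches the pieces of an interactive round back into a single chat item. A standalone sketch of the same reduce pattern, using simplified local types rather than FastGPT's ChatItemType (assumption: items merge only when both dataId and obj match):

```ts
type SimpleChatItem = {
  dataId?: string;
  obj: 'Human' | 'AI';
  value: { type: string; text?: { content: string } }[];
};

// Merge adjacent items that belong to the same message (same dataId and role)
const mergeByDataId = (items: SimpleChatItem[]): SimpleChatItem[] =>
  items.reduce<SimpleChatItem[]>((acc, cur) => {
    const last = acc[acc.length - 1];
    if (last && last.dataId === cur.dataId && last.obj === cur.obj) {
      last.value = last.value.concat(cur.value);
    } else {
      acc.push({ ...cur });
    }
    return acc;
  }, []);

// Example: two AI fragments with the same dataId collapse into one item
console.log(
  mergeByDataId([
    { dataId: 'a', obj: 'AI', value: [{ type: 'text', text: { content: 'Hello' } }] },
    { dataId: 'a', obj: 'AI', value: [{ type: 'interactive' }] }
  ]).length
); // 1
```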

View File

@@ -22,6 +22,7 @@ export enum DispatchNodeResponseKeyEnum {
   childrenResponses = 'childrenResponses', // Some nodes make recursive calls that need to be returned
   toolResponses = 'toolResponses', // The result is passed back to the tool node for use
   assistantResponses = 'assistantResponses', // assistant response
+  rewriteHistories = 'rewriteHistories', // If have the response, workflow histories will be rewrite
   interactive = 'INTERACTIVE' // is interactive
 }

View File

@@ -2,9 +2,9 @@ import { ChatNodeUsageType } from '../../../support/wallet/bill/type';
 import {
   ChatItemType,
   UserChatItemValueItemType,
-  ChatItemValueItemType,
   ToolRunResponseItemType,
-  NodeOutputItemType
+  NodeOutputItemType,
+  AIChatItemValueItemType
 } from '../../chat/type';
 import { FlowNodeInputItemType, FlowNodeOutputItemType } from '../type/io.d';
 import { StoreNodeItemType } from '../type/node';
@@ -173,13 +173,14 @@
   updateVarResult?: any[];
 };
 
-export type DispatchNodeResultType<T> = {
+export type DispatchNodeResultType<T = {}> = {
   [DispatchNodeResponseKeyEnum.skipHandleId]?: string[]; // skip some edge handle id
   [DispatchNodeResponseKeyEnum.nodeResponse]?: DispatchNodeResponseType; // The node response detail
   [DispatchNodeResponseKeyEnum.nodeDispatchUsages]?: ChatNodeUsageType[]; // Node total usage
   [DispatchNodeResponseKeyEnum.childrenResponses]?: DispatchNodeResultType[]; // Children node response
   [DispatchNodeResponseKeyEnum.toolResponses]?: ToolRunResponseItemType; // Tool response
-  [DispatchNodeResponseKeyEnum.assistantResponses]?: ChatItemValueItemType[]; // Assistant response(Store to db)
+  [DispatchNodeResponseKeyEnum.assistantResponses]?: AIChatItemValueItemType[]; // Assistant response(Store to db)
+  [DispatchNodeResponseKeyEnum.rewriteHistories]?: ChatItemType[];
 } & T;
 
 /* Single node props */
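
A minimal sketch of how a node result shaped like this might carry rewriteHistories, using simplified stand-in types (the enum keys are reduced to plain strings here; these are not FastGPT's real definitions):

```ts
type ChatItem = { obj: 'Human' | 'AI'; value: unknown[] };

// Simplified mirror of DispatchNodeResultType<T = {}>
type NodeResult<T = {}> = {
  skipHandleId?: string[];
  assistantResponses?: unknown[];
  rewriteHistories?: ChatItem[]; // when present, the dispatcher swaps its running histories
} & T;

// A node that trims the current interactive round out of the histories it was given
const resumeFromInteractive = (histories: ChatItem[]): NodeResult<{ answerText?: string }> => ({
  rewriteHistories: histories.slice(0, -2),
  answerText: undefined
});
```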

View File

@@ -27,16 +27,24 @@ export const getMaxHistoryLimitFromNodes = (nodes: StoreNodeItemType[]): number
   return limit * 2;
 };
 
+/*
+  Get interaction information (if any) from the last AI message.
+  What can be done:
+    1. Get the interactive data
+    2. Check that the workflow starts at the interaction node
+*/
 export const getLastInteractiveValue = (histories: ChatItemType[]) => {
   const lastAIMessage = histories.findLast((item) => item.obj === ChatRoleEnum.AI);
 
   if (lastAIMessage) {
-    const interactiveValue = lastAIMessage.value.find(
-      (v) => v.type === ChatItemValueTypeEnum.interactive
-    );
+    const lastValue = lastAIMessage.value[lastAIMessage.value.length - 1];
 
-    if (interactiveValue && 'interactive' in interactiveValue) {
-      return interactiveValue.interactive;
+    if (
+      lastValue &&
+      lastValue.type === ChatItemValueTypeEnum.interactive &&
+      !!lastValue.interactive
+    ) {
+      return lastValue.interactive;
     }
   }

View File

@@ -15,7 +15,7 @@ type InteractiveBasicType = {
 type UserSelectInteractive = {
   type: 'userSelect';
   params: {
-    // description: string;
+    description: string;
     userSelectOptions: UserSelectOptionItemType[];
     userSelectedVal?: string;
   };
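
For reference, a value produced by this interactive type might look like the following sketch (the option texts are hypothetical; the shape just mirrors the params above, where key/value come from UserSelectOptionItemType):

```ts
const userSelectValue = {
  type: 'userSelect' as const,
  params: {
    description: 'Which environment should be deployed?',
    userSelectOptions: [
      { key: 'option1', value: 'Staging' },
      { key: 'option2', value: 'Production' }
    ],
    // filled in once the user clicks an option
    userSelectedVal: undefined as string | undefined
  }
};
```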

View File

@@ -3,7 +3,7 @@ import { getAIApi } from '../config';
 import { countGptMessagesTokens } from '../../../common/string/tiktoken/index';
 import { loadRequestMessages } from '../../chat/utils';
 
-export const Prompt_QuestionGuide = `你是一个AI智能助手，可以回答和解决我的问题。请结合前面的对话记录，帮我生成 3 个问题，引导我继续提问。问题的长度应小于20个字符，按 JSON 格式返回: ["问题1", "问题2", "问题3"]`;
+export const Prompt_QuestionGuide = `你是一个AI智能助手，可以回答和解决我的问题。请结合前面的对话记录，帮我生成 3 个问题，引导我继续提问，生成问题的语言要与原问题相同。问题的长度应小于20个字符，按 JSON 格式返回: ["问题1", "问题2", "问题3"]`;
 
 export async function createQuestionGuide({
   messages,
@@ -19,6 +19,7 @@ export async function createQuestionGuide({
       content: Prompt_QuestionGuide
     }
   ];
+
   const ai = getAIApi({
     timeout: 480000
   });
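
A rough sketch of what a question-guide call like this boils down to: send the recent messages plus the guide prompt to an OpenAI-compatible chat completion endpoint and pull the JSON array out of the reply. The endpoint URL, model name, env var, and fallback behaviour below are illustrative assumptions, not FastGPT's actual configuration:

```ts
type ChatMessage = { role: 'system' | 'user' | 'assistant'; content: string };

async function createQuestionGuideSketch(history: ChatMessage[]): Promise<string[]> {
  const res = await fetch('https://api.example.com/v1/chat/completions', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${process.env.AI_KEY}` },
    body: JSON.stringify({
      model: 'gpt-4o-mini', // assumption: any chat model works here
      messages: [
        ...history.slice(-6),
        { role: 'user', content: 'Generate 3 short follow-up questions. Return a JSON array.' }
      ],
      temperature: 0.1
    })
  });
  const data = await res.json();
  const text: string = data?.choices?.[0]?.message?.content ?? '[]';

  // The model is asked for `["q1", "q2", "q3"]`; tolerate extra prose around the array
  const match = text.match(/\[[\s\S]*\]/);
  try {
    return match ? JSON.parse(match[0]) : [];
  } catch {
    return [];
  }
}
```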

View File

@@ -1,7 +1,7 @@
 import type { ChatItemType, ChatItemValueItemType } from '@fastgpt/global/core/chat/type';
 import { MongoChatItem } from './chatItemSchema';
 import { addLog } from '../../common/system/log';
-import { ChatItemValueTypeEnum, ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
+import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants';
 import { delFileByFileIdList, getGFSCollection } from '../../common/file/gridfs/controller';
 import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
 import { MongoChat } from './chatSchema';
@@ -80,52 +80,6 @@ export const addCustomFeedbacks = async ({
  }
};

The following block is removed:
/*
Update the user selected index of the interactive module
*/
export const updateUserSelectedResult = async ({
appId,
chatId,
userSelectedVal
}: {
appId: string;
chatId?: string;
userSelectedVal: string;
}) => {
if (!chatId) return;
try {
const chatItem = await MongoChatItem.findOne(
{ appId, chatId, obj: ChatRoleEnum.AI },
'value'
).sort({ _id: -1 });
if (!chatItem) return;
const interactiveValue = chatItem.value.find(
(v) => v.type === ChatItemValueTypeEnum.interactive
);
if (
!interactiveValue ||
interactiveValue.type !== ChatItemValueTypeEnum.interactive ||
!interactiveValue.interactive?.params
)
return;
interactiveValue.interactive = {
...interactiveValue.interactive,
params: {
...interactiveValue.interactive.params,
userSelectedVal
}
};
await chatItem.save();
} catch (error) {
addLog.error('updateUserSelectedResult error', error);
}
};
/*
  Delete chat files
  1. ChatId: Delete one chat files

View File

@@ -1,6 +1,10 @@
 import type { AIChatItemType, UserChatItemType } from '@fastgpt/global/core/chat/type.d';
 import { MongoApp } from '../app/schema';
-import { ChatSourceEnum } from '@fastgpt/global/core/chat/constants';
+import {
+  ChatItemValueTypeEnum,
+  ChatRoleEnum,
+  ChatSourceEnum
+} from '@fastgpt/global/core/chat/constants';
 import { MongoChatItem } from './chatItemSchema';
 import { MongoChat } from './chatSchema';
 import { addLog } from '../../common/system/log';
@@ -111,3 +115,85 @@ export async function saveChat({
     addLog.error(`update chat history error`, error);
   }
 }
export const updateInteractiveChat = async ({
chatId,
appId,
teamId,
tmbId,
userSelectedVal,
aiResponse,
newVariables,
newTitle
}: {
chatId: string;
appId: string;
teamId: string;
tmbId: string;
userSelectedVal: string;
aiResponse: AIChatItemType & { dataId?: string };
newVariables?: Record<string, any>;
newTitle: string;
}) => {
if (!chatId) return;
const chatItem = await MongoChatItem.findOne({ appId, chatId, obj: ChatRoleEnum.AI }).sort({
_id: -1
});
if (!chatItem || chatItem.obj !== ChatRoleEnum.AI) return;
const interactiveValue = chatItem.value[chatItem.value.length - 1];
if (
!interactiveValue ||
interactiveValue.type !== ChatItemValueTypeEnum.interactive ||
!interactiveValue.interactive?.params
) {
return;
}
interactiveValue.interactive = {
...interactiveValue.interactive,
params: {
...interactiveValue.interactive.params,
userSelectedVal
}
};
if (aiResponse.customFeedbacks) {
chatItem.customFeedbacks = chatItem.customFeedbacks
? [...chatItem.customFeedbacks, ...aiResponse.customFeedbacks]
: aiResponse.customFeedbacks;
}
if (aiResponse.responseData) {
chatItem.responseData = chatItem.responseData
? [...chatItem.responseData, ...aiResponse.responseData]
: aiResponse.responseData;
}
if (aiResponse.value) {
chatItem.value = chatItem.value ? [...chatItem.value, ...aiResponse.value] : aiResponse.value;
}
await mongoSessionRun(async (session) => {
await chatItem.save({ session });
await MongoChat.updateOne(
{
appId,
chatId
},
{
$set: {
variables: newVariables,
title: newTitle,
updateTime: new Date()
}
},
{
session
}
);
});
};
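
mongoSessionRun is used above so that the chat item update and the chat title/variable update land atomically. A generic sketch of such a helper over mongoose transactions (this assumes a MongoDB replica set, which transactions require, and is not FastGPT's exact implementation):

```ts
import mongoose, { ClientSession } from 'mongoose';

// Run `fn` inside a transaction and commit or abort automatically
export async function mongoSessionRunSketch<T>(
  fn: (session: ClientSession) => Promise<T>
): Promise<T> {
  const session = await mongoose.startSession();
  try {
    session.startTransaction();
    const result = await fn(session);
    await session.commitTransaction();
    return result;
  } catch (error) {
    await session.abortTransaction();
    throw error;
  } finally {
    session.endSession();
  }
}
```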

View File

@@ -211,11 +211,40 @@ export const loadRequestMessages = async ({
          };
        }
      }
if (item.role === ChatCompletionRequestMessageRoleEnum.Assistant) {
if (item.content !== undefined && !item.content) return;
if (Array.isArray(item.content) && item.content.length === 0) return;
}
        return item;
      })
      .filter(Boolean) as ChatCompletionMessageParam[];
  };
/*
Merge data for some consecutive roles
1. Contiguous assistant and both have content, merge content
*/
const mergeConsecutiveMessages = (
messages: ChatCompletionMessageParam[]
): ChatCompletionMessageParam[] => {
return messages.reduce((mergedMessages: ChatCompletionMessageParam[], currentMessage) => {
const lastMessage = mergedMessages[mergedMessages.length - 1];
if (
lastMessage &&
currentMessage.role === ChatCompletionRequestMessageRoleEnum.Assistant &&
lastMessage.role === ChatCompletionRequestMessageRoleEnum.Assistant &&
typeof lastMessage.content === 'string' &&
typeof currentMessage.content === 'string'
) {
lastMessage.content += currentMessage ? `\n${currentMessage.content}` : '';
} else {
mergedMessages.push(currentMessage);
}
return mergedMessages;
}, []);
};
  if (messages.length === 0) {
    return Promise.reject('core.chat.error.Messages empty');
@@ -245,11 +274,22 @@ export const loadRequestMessages = async ({
            ...item,
            content: await parseUserContent(item.content)
          };
} else if (item.role === ChatCompletionRequestMessageRoleEnum.Assistant) {
return {
role: item.role,
content: item.content,
function_call: item.function_call,
name: item.name,
refusal: item.refusal,
tool_calls: item.tool_calls
};
       } else {
         return item;
       }
     })
   )) as ChatCompletionMessageParam[];
 
-  return clearInvalidMessages(loadMessages) as SdkChatCompletionMessageParam[];
+  return mergeConsecutiveMessages(
+    clearInvalidMessages(loadMessages)
+  ) as SdkChatCompletionMessageParam[];
 };
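
The consecutive-assistant merge matters because the interactive flow can now produce two assistant messages back to back (the answer before the selection and the answer after it). A standalone sketch of the same idea with plain role/content pairs:

```ts
type Msg = { role: 'user' | 'assistant'; content: string };

const mergeConsecutiveAssistant = (messages: Msg[]): Msg[] =>
  messages.reduce<Msg[]>((acc, cur) => {
    const last = acc[acc.length - 1];
    if (last && last.role === 'assistant' && cur.role === 'assistant') {
      last.content += `\n${cur.content}`;
    } else {
      acc.push({ ...cur });
    }
    return acc;
  }, []);

// Two adjacent assistant turns collapse into one message
console.log(
  mergeConsecutiveAssistant([
    { role: 'user', content: 'Hi' },
    { role: 'assistant', content: 'Part A' },
    { role: 'assistant', content: 'Part B' }
  ])
); // [{ role: 'user', ... }, { role: 'assistant', content: 'Part A\nPart B' }]
```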

View File

@@ -6,6 +6,7 @@ import {
 import { NodeOutputKeyEnum } from '@fastgpt/global/core/workflow/constants';
 import type {
   ChatDispatchProps,
+  DispatchNodeResultType,
   ModuleDispatchProps,
   SystemVariablesType
 } from '@fastgpt/global/core/workflow/runtime/type';
@@ -145,14 +146,15 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
       responseData,
       nodeDispatchUsages,
       toolResponses,
-      assistantResponses
-    }: {
-      [NodeOutputKeyEnum.answerText]?: string;
-      [DispatchNodeResponseKeyEnum.nodeResponse]?: ChatHistoryItemResType;
-      [DispatchNodeResponseKeyEnum.nodeDispatchUsages]?: ChatNodeUsageType[];
-      [DispatchNodeResponseKeyEnum.toolResponses]?: ToolRunResponseItemType;
-      [DispatchNodeResponseKeyEnum.assistantResponses]?: AIChatItemValueItemType[]; // tool module, save the response value
-    }
+      assistantResponses,
+      rewriteHistories
+    }: Omit<
+      DispatchNodeResultType<{
+        [NodeOutputKeyEnum.answerText]?: string;
+        [DispatchNodeResponseKeyEnum.nodeResponse]?: ChatHistoryItemResType;
+      }>,
+      'nodeResponse'
+    >
   ) {
     if (responseData) {
       chatResponses.push(responseData);
@@ -182,6 +184,10 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
         });
       }
     }
+
+    if (rewriteHistories) {
+      histories = rewriteHistories;
+    }
   }
 
   /* Pass the output of the node, to get next nodes and update edge status */
   function nodeOutput(

View File

@@ -12,8 +12,6 @@ import type {
   UserSelectInteractive,
   UserSelectOptionItemType
 } from '@fastgpt/global/core/workflow/template/system/userSelect/type';
-import { updateUserSelectedResult } from '../../../chat/controller';
-import { textAdaptGptResponse } from '@fastgpt/global/core/workflow/runtime/utils';
 import { chatValue2RuntimePrompt } from '@fastgpt/global/core/chat/adapt';
 
 type Props = ModuleDispatchProps<{
@@ -30,6 +28,7 @@ export const dispatchUserSelect = async (props: Props): Promise<UserSelectRespon
   const {
     workflowStreamResponse,
     runningAppInfo: { id: appId },
+    histories,
     chatId,
     node: { nodeId, isEntry },
     params: { description, userSelectOptions },
@@ -38,21 +37,11 @@ export const dispatchUserSelect = async (props: Props): Promise<UserSelectRespon
   // Interactive node is not the entry node, return interactive result
   if (!isEntry) {
-    const answerText = description ? `\n${description}` : undefined;
-
-    if (answerText) {
-      workflowStreamResponse?.({
-        event: SseResponseEventEnum.fastAnswer,
-        data: textAdaptGptResponse({
-          text: answerText
-        })
-      });
-    }
-
     return {
-      [NodeOutputKeyEnum.answerText]: answerText,
       [DispatchNodeResponseKeyEnum.interactive]: {
         type: 'userSelect',
         params: {
+          description,
           userSelectOptions
         }
       }
@@ -70,14 +59,8 @@ export const dispatchUserSelect = async (props: Props): Promise<UserSelectRespon
     };
   }
 
-  // Update db
-  updateUserSelectedResult({
-    appId,
-    chatId,
-    userSelectedVal
-  });
-
   return {
+    [DispatchNodeResponseKeyEnum.rewriteHistories]: histories.slice(0, -2), // Removes the current session record as the history of subsequent nodes
     [DispatchNodeResponseKeyEnum.skipHandleId]: userSelectOptions
       .filter((item) => item.value !== userSelectedVal)
       .map((item: any) => getHandleId(nodeId, 'source', item.key)),
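
The skipHandleId list is how the unselected branches get pruned: every option except the chosen one contributes a source-handle id that the dispatcher marks as skipped. A small standalone sketch of that mapping (the `${nodeId}-source-${key}` format is an assumption for illustration; FastGPT's getHandleId may format ids differently):

```ts
type SelectOption = { key: string; value: string };

const getHandleIdSketch = (nodeId: string, type: 'source' | 'target', key: string) =>
  `${nodeId}-${type}-${key}`;

const getSkipHandleIds = (nodeId: string, options: SelectOption[], selectedVal: string) =>
  options
    .filter((item) => item.value !== selectedVal)
    .map((item) => getHandleIdSketch(nodeId, 'source', item.key));

// Example: only the "Confirm" branch keeps running
console.log(
  getSkipHandleIds(
    'node1',
    [
      { key: 'a', value: 'Confirm' },
      { key: 'b', value: 'Cancel' }
    ],
    'Confirm'
  )
); // ['node1-source-b']
```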

View File

@@ -99,6 +99,18 @@ export const dispatchHttp468Request = async (props: HttpRequestProps): Promise<H
   };
 
   httpReqUrl = replaceVariable(httpReqUrl, allVariables);
+
+  const replaceStringVariables = (text: string) => {
+    return replaceVariable(
+      replaceEditorVariable({
+        text,
+        nodes: runtimeNodes,
+        variables: allVariables,
+        runningNode: node
+      }),
+      allVariables
+    );
+  };
+
   // parse header
   const headers = await (() => {
     try {
@@ -110,24 +122,8 @@ export const dispatchHttp468Request = async (props: HttpRequestProps): Promise<H
       if (!httpHeader || httpHeader.length === 0) return {};
       // array
       return httpHeader.reduce((acc: Record<string, string>, item) => {
-        const key = replaceVariable(
-          replaceEditorVariable({
-            text: item.key,
-            nodes: runtimeNodes,
-            variables,
-            runningNode: node
-          }),
-          allVariables
-        );
-        const value = replaceVariable(
-          replaceEditorVariable({
-            text: item.value,
-            nodes: runtimeNodes,
-            variables,
-            runningNode: node
-          }),
-          allVariables
-        );
+        const key = replaceStringVariables(item.key);
+        const value = replaceStringVariables(item.value);
         acc[key] = valueTypeFormat(value, WorkflowIOValueTypeEnum.string);
         return acc;
       }, {});
@@ -137,24 +133,8 @@ export const dispatchHttp468Request = async (props: HttpRequestProps): Promise<H
   })();
 
   const params = httpParams.reduce((acc: Record<string, string>, item) => {
-    const key = replaceVariable(
-      replaceEditorVariable({
-        text: item.key,
-        nodes: runtimeNodes,
-        variables,
-        runningNode: node
-      }),
-      allVariables
-    );
-    const value = replaceVariable(
-      replaceEditorVariable({
-        text: item.value,
-        nodes: runtimeNodes,
-        variables,
-        runningNode: node
-      }),
-      allVariables
-    );
+    const key = replaceStringVariables(item.key);
+    const value = replaceStringVariables(item.value);
     acc[key] = valueTypeFormat(value, WorkflowIOValueTypeEnum.string);
     return acc;
   }, {});
@@ -165,25 +145,9 @@ export const dispatchHttp468Request = async (props: HttpRequestProps): Promise<H
     if (httpContentType === ContentTypes.formData) {
       if (!Array.isArray(httpFormBody)) return {};
       httpFormBody = httpFormBody.map((item) => ({
-        key: replaceVariable(
-          replaceEditorVariable({
-            text: item.key,
-            nodes: runtimeNodes,
-            variables,
-            runningNode: node
-          }),
-          allVariables
-        ),
+        key: replaceStringVariables(item.key),
         type: item.type,
-        value: replaceVariable(
-          replaceEditorVariable({
-            text: item.value,
-            nodes: runtimeNodes,
-            variables,
-            runningNode: node
-          }),
-          allVariables
-        )
+        value: replaceStringVariables(item.value)
       }));
       const formData = new FormData();
       for (const { key, value } of httpFormBody) {
@@ -194,25 +158,9 @@ export const dispatchHttp468Request = async (props: HttpRequestProps): Promise<H
     if (httpContentType === ContentTypes.xWwwFormUrlencoded) {
       if (!Array.isArray(httpFormBody)) return {};
       httpFormBody = httpFormBody.map((item) => ({
-        key: replaceVariable(
-          replaceEditorVariable({
-            text: item.key,
-            nodes: runtimeNodes,
-            variables,
-            runningNode: node
-          }),
-          allVariables
-        ),
+        key: replaceStringVariables(item.key),
         type: item.type,
-        value: replaceVariable(
-          replaceEditorVariable({
-            text: item.value,
-            nodes: runtimeNodes,
-            variables,
-            runningNode: node
-          }),
-          allVariables
-        )
+        value: replaceStringVariables(item.value)
       }));
       const urlSearchParams = new URLSearchParams();
       for (const { key, value } of httpFormBody) {
@@ -228,15 +176,7 @@ export const dispatchHttp468Request = async (props: HttpRequestProps): Promise<H
         const removeSignJson = removeUndefinedSign(jsonParse);
         return removeSignJson;
       }
-      httpJsonBody = replaceVariable(
-        replaceEditorVariable({
-          text: httpJsonBody,
-          nodes: runtimeNodes,
-          variables,
-          runningNode: node
-        }),
-        allVariables
-      );
+      httpJsonBody = replaceStringVariables(httpJsonBody);
       return httpJsonBody.replaceAll(UNDEFINED_SIGN, 'null');
     } catch (error) {
       console.log(error);
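
All of the header/param/body branches above now funnel through one helper. A self-contained sketch of the same two-pass idea, where both the editor-variable pass and the global-variable pass are reduced to simple `{{key}}` substitution (FastGPT's replaceEditorVariable also receives the runtime nodes, presumably to resolve node references; that part is omitted here):

```ts
type Vars = Record<string, string | number | boolean | undefined>;

// Replace every {{key}} that has a value; leave unknown placeholders untouched
const replaceTemplateVars = (text: string, variables: Vars): string =>
  text.replace(/\{\{([^}]+)\}\}/g, (match, rawKey) => {
    const key = String(rawKey).trim();
    const val = variables[key];
    return val === undefined ? match : String(val);
  });

// In the real node this is replaceVariable(replaceEditorVariable({...}), allVariables)
const replaceStringVariablesSketch = (text: string, allVariables: Vars) =>
  replaceTemplateVars(replaceTemplateVars(text, allVariables), allVariables);

// Example
console.log(replaceStringVariablesSketch('Bearer {{apiKey}}', { apiKey: 'sk-test' })); // 'Bearer sk-test'
```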

View File

@@ -45,7 +45,7 @@ import ChatBoxDivider from '../../Divider';
 import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';
 import { getNanoid } from '@fastgpt/global/common/string/tools';
 import { ChatItemValueTypeEnum, ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
-import { formatChatValue2InputType } from './utils';
+import { checkIsInteractiveByHistories, formatChatValue2InputType } from './utils';
 import { textareaMinH } from './constants';
 import { SseResponseEventEnum } from '@fastgpt/global/core/workflow/runtime/constants';
 import ChatProvider, { ChatBoxContext, ChatProviderProps } from './Provider';
@@ -156,15 +156,11 @@ const ChatBox = (
     isChatting
   } = useContextSelector(ChatBoxContext, (v) => v);
 
-  const isInteractive = useMemo(() => {
-    const lastAIHistory = chatHistories[chatHistories.length - 1];
-    if (!lastAIHistory) return false;
-    const lastAIMessage = lastAIHistory.value as AIChatItemValueItemType[];
-    const interactiveContent = lastAIMessage?.find(
-      (item) => item.type === ChatItemValueTypeEnum.interactive
-    )?.interactive?.params;
-    return !!interactiveContent;
-  }, [chatHistories]);
+  // Workflow running, there are user input or selection
+  const isInteractive = useMemo(
+    () => checkIsInteractiveByHistories(chatHistories),
+    [chatHistories]
+  );
 
   // compute variable input is finish.
   const chatForm = useForm<ChatBoxInputFormType>({
@@ -343,16 +339,15 @@ const ChatBox = (
   // create question guide
   const createQuestionGuide = useCallback(
-    async ({ history }: { history: ChatSiteItemType[] }) => {
+    async ({ histories }: { histories: ChatSiteItemType[] }) => {
       if (!questionGuide || chatController.current?.signal?.aborted) return;
       try {
         const abortSignal = new AbortController();
         questionGuideController.current = abortSignal;
 
         const result = await postQuestionGuide(
           {
-            messages: chats2GPTMessages({ messages: history, reserveId: false }).slice(-6),
+            messages: chats2GPTMessages({ messages: histories, reserveId: false }).slice(-6),
             shareId,
             outLinkUid,
             teamId,
@@ -464,8 +459,9 @@ const ChatBox = (
         }
       ];
 
-      // 插入内容
-      setChatHistories(newChatList);
+      const isInteractive = checkIsInteractiveByHistories(history);
+      // Update histories(Interactive input does not require new session rounds)
+      setChatHistories(isInteractive ? newChatList.slice(0, -2) : newChatList);
 
       // 清空输入内容
       resetInputVal({});
@@ -476,6 +472,7 @@ const ChatBox = (
       const abortSignal = new AbortController();
       chatController.current = abortSignal;
 
+      // Last empty ai message will be removed
       const messages = chats2GPTMessages({ messages: newChatList, reserveId: true });
 
       const {
@@ -483,7 +480,7 @@ const ChatBox = (
         responseText,
         isNewChat = false
       } = await onStartChat({
-        messages: messages.slice(0, -1),
+        messages: messages,
         responseChatItemId: responseChatId,
         controller: abortSignal,
         generatingMessage: (e) => generatingMessage({ ...e, autoTTSResponse }),
@@ -492,35 +489,29 @@ const ChatBox = (
       isNewChatReplace.current = isNewChat;
 
-      // set finish status
-      setChatHistories((state) =>
-        state.map((item, index) => {
+      // Set last chat finish status
+      let newChatHistories: ChatSiteItemType[] = [];
+      setChatHistories((state) => {
+        newChatHistories = state.map((item, index) => {
           if (index !== state.length - 1) return item;
           return {
             ...item,
             status: 'finish',
-            responseData
+            responseData: item.responseData
+              ? [...item.responseData, ...responseData]
+              : responseData
           };
-        })
-      );
-
-      setTimeout(() => {
-        createQuestionGuide({
-          history: newChatList.map((item, i) =>
-            i === newChatList.length - 1
-              ? {
-                  ...item,
-                  value: [
-                    {
-                      type: ChatItemValueTypeEnum.text,
-                      text: {
-                        content: responseText
-                      }
-                    }
-                  ]
-                }
-              : item
-          )
-        });
+        });
+        return newChatHistories;
+      });
+
+      setTimeout(() => {
+        if (!checkIsInteractiveByHistories(newChatHistories)) {
+          createQuestionGuide({
+            histories: newChatHistories
+          });
+        }
         generatingScroll();
         isPc && TextareaDom.current?.focus();
       }, 100);
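
The setChatHistories change above uses a common React pattern: capture the value computed inside the functional updater into a local variable so the follow-up work (question guide, scrolling) sees the fresh histories instead of a stale closure. A reduced sketch of the pattern, with hypothetical item types:

```tsx
import { useState } from 'react';

type Item = { status: 'loading' | 'finish' };

function useFinishLastItem() {
  const [items, setItems] = useState<Item[]>([]);

  const finishLast = () => {
    // Capture the freshly computed state so code after setItems can use it synchronously
    let nextItems: Item[] = [];
    setItems((state) => {
      nextItems = state.map((item, i) =>
        i === state.length - 1 ? { ...item, status: 'finish' as const } : item
      );
      return nextItems;
    });

    setTimeout(() => {
      // nextItems is the updated array, not the stale `items` from this render
      console.log('finished items:', nextItems.filter((i) => i.status === 'finish').length);
    }, 100);
  };

  return { items, setItems, finishLast };
}
```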

View File

@@ -1,7 +1,11 @@
-import { ChatItemValueItemType, ChatSiteItemType } from '@fastgpt/global/core/chat/type';
+import {
+  AIChatItemValueItemType,
+  ChatItemValueItemType,
+  ChatSiteItemType
+} from '@fastgpt/global/core/chat/type';
 import { ChatBoxInputType, UserInputFileItemType } from './type';
 import { getFileIcon } from '@fastgpt/global/common/file/icon';
-import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants';
+import { ChatItemValueTypeEnum, ChatStatusEnum } from '@fastgpt/global/core/chat/constants';
 
 export const formatChatValue2InputType = (value?: ChatItemValueItemType[]): ChatBoxInputType => {
   if (!value) {
@@ -38,6 +42,20 @@ export const formatChatValue2InputType = (value?: ChatItemValueItemType[]): Chat
   };
 };
export const checkIsInteractiveByHistories = (chatHistories: ChatSiteItemType[]) => {
const lastAIHistory = chatHistories[chatHistories.length - 1];
if (!lastAIHistory) return false;
const lastMessageValue = lastAIHistory.value[
lastAIHistory.value.length - 1
] as AIChatItemValueItemType;
return (
lastMessageValue.type === ChatItemValueTypeEnum.interactive &&
!!lastMessageValue?.interactive?.params
);
};
 export const setUserSelectResultToHistories = (
   histories: ChatSiteItemType[],
   selectVal: string
@@ -47,9 +65,14 @@ export const setUserSelectResultToHistories = (
   // @ts-ignore
   return histories.map((item, i) => {
     if (i !== histories.length - 1) return item;
-    item.value;
 
-    const value = item.value.map((val) => {
-      if (val.type !== ChatItemValueTypeEnum.interactive || !val.interactive) return val;
+    const value = item.value.map((val, i) => {
+      if (
+        i !== item.value.length - 1 ||
+        val.type !== ChatItemValueTypeEnum.interactive ||
+        !val.interactive
+      )
+        return val;
 
       return {
         ...val,
@@ -67,6 +90,7 @@ export const setUserSelectResultToHistories = (
     return {
       ...item,
+      status: ChatStatusEnum.loading,
       value
     };
   });
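
A quick usage sketch of the interactive check added above, self-contained with simplified shapes (the real ChatSiteItemType carries many more fields):

```ts
type SimpleValue = { type: 'text' | 'interactive'; interactive?: { params?: object } };
type SimpleHistory = { obj: 'Human' | 'AI'; value: SimpleValue[] };

// Same rule as checkIsInteractiveByHistories: the very last value of the very last
// message must be an interactive block that still carries params.
const isInteractiveSketch = (histories: SimpleHistory[]) => {
  const last = histories[histories.length - 1];
  if (!last) return false;
  const lastValue = last.value[last.value.length - 1];
  return !!lastValue && lastValue.type === 'interactive' && !!lastValue.interactive?.params;
};

console.log(
  isInteractiveSketch([
    { obj: 'Human', value: [{ type: 'text' }] },
    { obj: 'AI', value: [{ type: 'text' }, { type: 'interactive', interactive: { params: {} } }] }
  ])
); // true: ChatBox reuses the current round instead of appending a new one
```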

View File

@@ -16,7 +16,7 @@ import {
   ChatSiteItemType,
   UserChatItemValueItemType
 } from '@fastgpt/global/core/chat/type';
-import React from 'react';
+import React, { useMemo } from 'react';
 import MyIcon from '@fastgpt/web/components/common/Icon';
 import Avatar from '@fastgpt/web/components/common/Avatar';
 import { SendPromptFnType } from '../ChatContainer/ChatBox/type';
@@ -45,144 +45,168 @@
(This hunk rewrites the component body: the text / tool / interactive branches move into a memoized `Render` block, the question guide is split out of the markdown source into its own memoized `RenderQuestionGuide` block, and the userSelect options now render the interactive description, use a fixed 250px column, wrap button text with `whiteSpace='pre-wrap'`, stay disabled once a value has been selected, and show a typing animation while chatting. Updated code:)
}: props) => {
  const chatHistories = useContextSelector(ChatBoxContext, (v) => v.chatHistories);

  // Question guide
  const RenderQuestionGuide = useMemo(() => {
    if (
      isLastChild &&
      !isChatting &&
      questionGuides.length > 0 &&
      index === chat.value.length - 1
    ) {
      return (
        <Markdown
          source={`\`\`\`${CodeClassNameEnum.questionGuide}
${JSON.stringify(questionGuides)}`}
        />
      );
    }
    return null;
  }, [chat.value.length, index, isChatting, isLastChild, questionGuides]);

  const Render = useMemo(() => {
    if (value.type === ChatItemValueTypeEnum.text && value.text) {
      let source = (value.text?.content || '').trim();

      // First empty line
      if (!source && chat.value.length > 1) return null;

      return (
        <Markdown
          source={source}
          showAnimation={isLastChild && isChatting && index === chat.value.length - 1}
        />
      );
    }
    if (value.type === ChatItemValueTypeEnum.tool && value.tools) {
      return (
        <Box>
          {value.tools.map((tool) => {
            const toolParams = (() => {
              try {
                return JSON.stringify(JSON.parse(tool.params), null, 2);
              } catch (error) {
                return tool.params;
              }
            })();
            const toolResponse = (() => {
              try {
                return JSON.stringify(JSON.parse(tool.response), null, 2);
              } catch (error) {
                return tool.response;
              }
            })();

            return (
              <Accordion key={tool.id} allowToggle>
                <AccordionItem borderTop={'none'} borderBottom={'none'}>
                  <AccordionButton
                    w={'auto'}
                    bg={'white'}
                    borderRadius={'md'}
                    borderWidth={'1px'}
                    borderColor={'myGray.200'}
                    boxShadow={'1'}
                    pl={3}
                    pr={2.5}
                    _hover={{
                      bg: 'auto'
                    }}
                  >
                    <Avatar src={tool.toolAvatar} w={'1.25rem'} h={'1.25rem'} borderRadius={'sm'} />
                    <Box mx={2} fontSize={'sm'} color={'myGray.900'}>
                      {tool.toolName}
                    </Box>
                    {isChatting && !tool.response && <MyIcon name={'common/loading'} w={'14px'} />}
                    <AccordionIcon color={'myGray.600'} ml={5} />
                  </AccordionButton>
                  <AccordionPanel
                    py={0}
                    px={0}
                    mt={3}
                    borderRadius={'md'}
                    overflow={'hidden'}
                    maxH={'500px'}
                    overflowY={'auto'}
                  >
                    {toolParams && toolParams !== '{}' && (
                      <Box mb={3}>
                        <Markdown
                          source={`~~~json#Input
${toolParams}`}
                        />
                      </Box>
                    )}
                    {toolResponse && (
                      <Markdown
                        source={`~~~json#Response
${toolResponse}`}
                      />
                    )}
                  </AccordionPanel>
                </AccordionItem>
              </Accordion>
            );
          })}
        </Box>
      );
    }
    if (
      value.type === ChatItemValueTypeEnum.interactive &&
      value.interactive &&
      value.interactive.type === 'userSelect'
    ) {
      return (
        <>
          {value.interactive?.params?.description && (
            <Markdown source={value.interactive.params.description} />
          )}
          <Flex flexDirection={'column'} gap={2} w={'250px'}>
            {value.interactive.params.userSelectOptions?.map((option) => {
              const selected = option.value === value.interactive?.params?.userSelectedVal;

              return (
                <Button
                  key={option.key}
                  variant={'whitePrimary'}
                  whiteSpace={'pre-wrap'}
                  isDisabled={value.interactive?.params?.userSelectedVal !== undefined}
                  {...(selected
                    ? {
                        _disabled: {
                          cursor: 'default',
                          borderColor: 'primary.300',
                          bg: 'primary.50 !important',
                          color: 'primary.600'
                        }
                      }
                    : {})}
                  onClick={() => {
                    onSendMessage?.({
                      text: option.value,
                      history: setUserSelectResultToHistories(chatHistories, option.value)
                    });
                  }}
                >
                  {option.value}
                </Button>
              );
            })}
          </Flex>
          {/* Animation */}
          {isLastChild && isChatting && index === chat.value.length - 1 && (
            <Markdown source={''} showAnimation />
          )}
        </>
      );
    }
  }, [chat.value.length, chatHistories, index, isChatting, isLastChild, onSendMessage, value]);

  return (
    <>
      {Render}
      {RenderQuestionGuide}
    </>
  );
};

export default React.memo(AIResponseBox);

View File

@@ -59,6 +59,8 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
   try {
     const chatMessages = GPTMessages2Chats(messages);
 
+    // console.log(JSON.stringify(chatMessages, null, 2), '====', chatMessages.length);
+
     const userInput = chatMessages.pop()?.value as UserChatItemValueItemType[] | undefined;
 
     /* user auth */

View File

@@ -17,11 +17,12 @@ import {
   getMaxHistoryLimitFromNodes,
   initWorkflowEdgeStatus,
   storeNodes2RuntimeNodes,
-  textAdaptGptResponse
+  textAdaptGptResponse,
+  getLastInteractiveValue
 } from '@fastgpt/global/core/workflow/runtime/utils';
 import { GPTMessages2Chats, chatValue2RuntimePrompt } from '@fastgpt/global/core/chat/adapt';
 import { getChatItems } from '@fastgpt/service/core/chat/controller';
-import { saveChat } from '@fastgpt/service/core/chat/saveChat';
+import { saveChat, updateInteractiveChat } from '@fastgpt/service/core/chat/saveChat';
 import { responseWrite } from '@fastgpt/service/common/response';
 import { pushChatUsage } from '@/service/support/wallet/usage/push';
 import { authOutLinkChatStart } from '@/service/support/permission/auth/outLink';
@@ -45,7 +46,7 @@ import { AuthOutLinkChatProps } from '@fastgpt/global/support/outLink/api';
 import { MongoChat } from '@fastgpt/service/core/chat/chatSchema';
 import { ChatErrEnum } from '@fastgpt/global/common/error/code/chat';
 import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';
-import { UserChatItemType } from '@fastgpt/global/core/chat/type';
+import { AIChatItemType, UserChatItemType } from '@fastgpt/global/core/chat/type';
 import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
 import { NextAPI } from '@/service/middleware/entry';
@@ -210,9 +211,6 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
     MongoChat.findOne({ appId: app._id, chatId }, 'source variableList variables')
   ]);
 
-  // Get chat histories
-  const newHistories = concatHistories(histories, chatMessages);
-
   // Get store variables(Api variable precedence)
   if (chatDetail?.variables) {
     variables = {
@@ -221,6 +219,9 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
     };
   }
 
+  // Get chat histories
+  const newHistories = concatHistories(histories, chatMessages);
+
   // Get runtimeNodes
   let runtimeNodes = storeNodes2RuntimeNodes(nodes, getWorkflowEntryNodeIds(nodes, newHistories));
   if (isPlugin) {
@@ -286,36 +287,51 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
(This hunk builds the AI response object once and then branches: an interactive request updates the existing chat record through updateInteractiveChat, while every other request keeps going through saveChat with the same arguments as before. Updated code:)
      return ChatSourceEnum.online;
    })();

    const isInteractiveRequest = !!getLastInteractiveValue(histories);
    const { text: userSelectedVal } = chatValue2RuntimePrompt(userQuestion.value);

    const newTitle = isPlugin
      ? variables.cTime ?? getSystemTime(user.timezone)
      : getChatTitleFromChatMessage(userQuestion);

    const aiResponse: AIChatItemType & { dataId?: string } = {
      dataId: responseChatItemId,
      obj: ChatRoleEnum.AI,
      value: assistantResponses,
      [DispatchNodeResponseKeyEnum.nodeResponse]: flowResponses
    };

    if (isInteractiveRequest) {
      await updateInteractiveChat({
        chatId,
        appId: app._id,
        teamId,
        tmbId: tmbId,
        userSelectedVal,
        aiResponse,
        newVariables,
        newTitle
      });
    } else {
      await saveChat({
        chatId,
        appId: app._id,
        teamId,
        tmbId: tmbId,
        nodes,
        appChatConfig: chatConfig,
        variables: newVariables,
        isUpdateUseTime: isOwnerUse && source === ChatSourceEnum.online, // owner update use time
        newTitle,
        shareId,
        outLinkUid: outLinkUserId,
        source,
        content: [userQuestion, aiResponse],
        metadata: {
          originIp
        }
      });
    }
  }

  addLog.info(`completions running time: ${(Date.now() - startTime) / 1000}s`);

View File

@@ -466,7 +466,21 @@ const RenderList = React.memo(function RenderList({
             flowNodeType: templateNode.flowNodeType,
             pluginId: templateNode.pluginId
           }),
-          intro: t(templateNode.intro as any)
+          intro: t(templateNode.intro as any),
+          inputs: templateNode.inputs.map((input) => ({
+            ...input,
+            valueDesc: t(input.valueDesc as any),
+            label: t(input.label as any),
+            description: t(input.description as any),
+            debugLabel: t(input.debugLabel as any),
+            toolDescription: t(input.toolDescription as any)
+          })),
+          outputs: templateNode.outputs.map((output) => ({
+            ...output,
+            valueDesc: t(output.valueDesc as any),
+            label: t(output.label as any),
+            description: t(output.description as any)
+          }))
         },
         position: { x: mouseX, y: mouseY - 20 },
         selected: true

View File

@@ -26,6 +26,7 @@ import { AiChatModule } from '@fastgpt/global/core/workflow/template/system/aiCh
 import { DatasetSearchModule } from '@fastgpt/global/core/workflow/template/system/datasetSearch';
 import { ReadFilesNodes } from '@fastgpt/global/core/workflow/template/system/readFiles';
 import { i18nT } from '@fastgpt/web/i18n/utils';
+import { Input_Template_UserChatInput } from '@fastgpt/global/core/workflow/template/input';
 
 type WorkflowType = {
   nodes: StoreNodeItemType[];
@@ -259,12 +260,8 @@ export function form2AppWorkflow(
           value: formData.dataset.datasetSearchExtensionBg
         },
         {
-          key: 'userChatInput',
-          renderTypeList: [FlowNodeInputTypeEnum.reference, FlowNodeInputTypeEnum.textarea],
-          valueType: WorkflowIOValueTypeEnum.string,
-          label: '用户问题',
-          required: true,
-          toolDescription: '需要检索的内容',
+          ...Input_Template_UserChatInput,
+          toolDescription: i18nT('workflow:content_to_search'),
           value: question
         }
       ],
@@ -503,6 +500,18 @@ export function form2AppWorkflow(
     ]
   };
 
+  // Add t
+  config.nodes.forEach((node) => {
+    node.name = t(node.name);
+    node.intro = t(node.intro);
+
+    node.inputs.forEach((input) => {
+      input.label = t(input.label);
+      input.description = t(input.description);
+      input.toolDescription = t(input.toolDescription);
+    });
+  });
+
   return config;
 }
} }