4.8.11 perf (#2768)

* perf: watch local

* perf: dataset list ui

* perf: check for invalid workflow edges on save

* remove log

* perf: forbid touch scaling

* perf: rename dataset process

* feat: support non-stream mode for child apps

* feat: dispatching a child app records run details

* feat: save child app run logs

* fix: share page init error

* perf: chatId reset
Archer
2024-09-23 10:17:49 +08:00
committed by GitHub
parent 4245ea4998
commit 3ab934771f
38 changed files with 252 additions and 143 deletions

View File

@@ -21,7 +21,7 @@
   "i18n-ally.namespace": true,
   "i18n-ally.pathMatcher": "{locale}/{namespaces}.json",
   "i18n-ally.extract.targetPickingStrategy": "most-similar-by-key",
-  "i18n-ally.translate.engines": ["deepl", "google"],
+  "i18n-ally.translate.engines": ["google"],
   "[typescript]": {
     "editor.defaultFormatter": "esbenp.prettier-vscode"
   }

View File

@@ -60,7 +60,7 @@ weight: 708
      "charsPointsPrice": 0,
      "censor": false,
      "vision": true,
-      "datasetProcess": false,
+      "datasetProcess": true,
      "usedInClassify": true,
      "usedInExtractFields": true,
      "usedInToolCall": true,
@@ -83,7 +83,7 @@ weight: 708
      "charsPointsPrice": 0,
      "censor": false,
      "vision": false,
-      "datasetProcess": false,
+      "datasetProcess": true,
      "usedInClassify": true,
      "usedInExtractFields": true,
      "usedInToolCall": true,
@@ -110,7 +110,7 @@ weight: 708
      "charsPointsPrice": 0,
      "censor": false,
      "vision": false,
-      "datasetProcess": false,
+      "datasetProcess": true,
      "usedInClassify": true,
      "usedInExtractFields": true,
      "usedInToolCall": true,

View File

@@ -135,7 +135,7 @@ CHAT_API_KEY=sk-xxxxxx
      "charsPointsPrice": 0,
      "censor": false,
      "vision": false, // 是否支持图片输入
-      "datasetProcess": false, // 是否设置为知识库处理模型
+      "datasetProcess": true, // 是否设置为知识库处理模型
      "usedInClassify": true, // 是否用于问题分类
      "usedInExtractFields": true, // 是否用于字段提取
      "usedInToolCall": true, // 是否用于工具调用

View File

@@ -27,7 +27,7 @@ weight: 813
      "charsPointsPrice": 0,
      "censor": false,
      "vision": false,
-      "datasetProcess": false,
+      "datasetProcess": true,
      "usedInClassify": true,
      "usedInExtractFields": true,
      "usedInToolCall": true,
@@ -56,7 +56,7 @@ weight: 813
      "charsPointsPrice": 0,
      "censor": false,
      "vision": false,
-      "datasetProcess": false,
+      "datasetProcess": true,
      "usedInClassify": true,
      "usedInExtractFields": true,
      "usedInToolCall": true,
@@ -92,11 +92,14 @@ weight: 813
 6. 新增 - 支持 Openai o1 模型,需增加模型的 `defaultConfig` 配置,覆盖 `temperature`、`max_tokens`、`stream` 配置,o1 不支持 stream 模式, 详细可重新拉取 `config.json` 配置文件查看。
 7. 新增 - AI 对话节点知识库引用,支持配置 role=system 和 role=user,已配置过自定义提示词的节点将会保持 user 模式,其余用户将转成 system 模式。
 8. 新增 - 插件支持上传系统文件。
-9. 优化 - 工作流嵌套层级限制 20 层,避免因编排不合理导致的无限死循环。
-10. 优化 - 工作流 handler 性能优化。
-11. 优化 - 工作流快捷键,避免调试测试时也会触发。
-12. 优化 - 流输出,切换 tab 时仍可以继续输出。
-13. 优化 - 完善外部文件知识库相关 API。
-14. 修复 - 知识库选择权限问题。
-15. 修复 - 空 chatId 发起对话,首轮携带用户选择时会异常。
-16. 修复 - createDataset 接口,intro 为赋值。
+9. 新增 - 支持工作流嵌套子应用时,可以设置`非流模式`。
+10. 新增 - 调试模式下,子应用调用,支持返回详细运行数据。
+11. 新增 - 保留所有模式下,子应用嵌套调用的日志。
+12. 优化 - 工作流嵌套层级限制 20 层,避免因编排不合理导致的无限死循环。
+13. 优化 - 工作流 handler 性能优化。
+14. 优化 - 工作流快捷键,避免调试测试时也会触发。
+15. 优化 - 流输出,切换 tab 时仍可以继续输出。
+16. 优化 - 完善外部文件知识库相关 API。
+17. 修复 - 知识库选择权限问题。
+18. 修复 - 空 chatId 发起对话,首轮携带用户选择时会异常。
+19. 修复 - createDataset 接口,intro 为赋值。

View File

@@ -19,7 +19,7 @@ data:
      "charsPointsPrice": 0,
      "censor": false,
      "vision": false,
-      "datasetProcess": false,
+      "datasetProcess": true,
      "usedInClassify": true,
      "usedInExtractFields": true,
      "usedInToolCall": true,
@@ -63,7 +63,7 @@ data:
      "charsPointsPrice": 0,
      "censor": false,
      "vision": false,
-      "datasetProcess": false,
+      "datasetProcess": true,
      "usedInClassify": true,
      "usedInExtractFields": true,
      "usedInToolCall": true,
@@ -85,7 +85,7 @@ data:
      "charsPointsPrice": 0,
      "censor": false,
      "vision": true,
-      "datasetProcess": false,
+      "datasetProcess": true,
      "usedInClassify": false,
      "usedInExtractFields": false,
      "usedInToolCall": false,

View File

@@ -70,6 +70,7 @@ export enum NodeInputKeyEnum {
   anyInput = 'system_anyInput',
   textareaInput = 'system_textareaInput',
   addInputParam = 'system_addInputParam',
+  forbidStream = 'system_forbid_stream',

   // history
   historyMaxAmount = 'maxContext',

View File

@@ -105,3 +105,12 @@ export const Input_Template_Node_Height: FlowNodeInputItemType = {
   label: '',
   value: 900
 };
+
+export const Input_Template_Stream_MODE: FlowNodeInputItemType = {
+  key: NodeInputKeyEnum.forbidStream,
+  renderTypeList: [FlowNodeInputTypeEnum.switch],
+  valueType: WorkflowIOValueTypeEnum.boolean,
+  label: i18nT('workflow:template.forbid_stream'),
+  description: i18nT('workflow:template.forbid_stream_desc'),
+  value: false
+};

View File

@@ -31,7 +31,11 @@ import {
 import { IfElseResultEnum } from './template/system/ifElse/constant';
 import { RuntimeNodeItemType } from './runtime/type';
 import { getReferenceVariableValue } from './runtime/utils';
-import { Input_Template_History, Input_Template_UserChatInput } from './template/input';
+import {
+  Input_Template_History,
+  Input_Template_Stream_MODE,
+  Input_Template_UserChatInput
+} from './template/input';
 import { i18nT } from '../../../web/i18n/utils';
 import { RuntimeUserPromptType, UserChatItemType } from '../../core/chat/type';
 import { getNanoid } from '../../common/string/tools';
@@ -179,17 +183,21 @@ export const pluginData2FlowNodeIO = ({
   const pluginOutput = nodes.find((node) => node.flowNodeType === FlowNodeTypeEnum.pluginOutput);

   return {
-    inputs:
-      pluginInput?.inputs.map((item) => ({
-        ...item,
-        ...getModuleInputUiField(item),
-        value: getOrInitModuleInputValue(item),
-        canEdit: false,
-        renderTypeList:
-          item.renderTypeList[0] === FlowNodeInputTypeEnum.customVariable
-            ? [FlowNodeInputTypeEnum.reference, FlowNodeInputTypeEnum.input]
-            : item.renderTypeList
-      })) || [],
+    inputs: pluginInput
+      ? [
+          Input_Template_Stream_MODE,
+          ...pluginInput?.inputs.map((item) => ({
+            ...item,
+            ...getModuleInputUiField(item),
+            value: getOrInitModuleInputValue(item),
+            canEdit: false,
+            renderTypeList:
+              item.renderTypeList[0] === FlowNodeInputTypeEnum.customVariable
+                ? [FlowNodeInputTypeEnum.reference, FlowNodeInputTypeEnum.input]
+                : item.renderTypeList
+          }))
+        ]
+      : [],
     outputs: pluginOutput
       ? [
          ...pluginOutput.inputs.map((item) => ({
@@ -250,6 +258,7 @@ export const appData2FlowNodeIO = ({
   return {
     inputs: [
+      Input_Template_Stream_MODE,
       Input_Template_History,
       Input_Template_UserChatInput,
       // ...(showFileLink ? [Input_Template_File_Link] : []),

View File

@@ -4,7 +4,11 @@ import { countGptMessagesTokens } from '../../../common/string/tiktoken/index';
 import { loadRequestMessages } from '../../chat/utils';
 import { llmCompletionsBodyFormat } from '../utils';

-export const Prompt_QuestionGuide = `你是一个AI智能助手,可以回答和解决我的问题。请结合前面的对话记录,帮我生成 3 个问题,引导我继续提问,生成问题的语言要与原问题相同。问题的长度应小于20个字符,按 JSON 格式返回: ["问题1", "问题2", "问题3"]`;
+export const Prompt_QuestionGuide = `你是一个AI智能助手,你的任务是结合对话记录,推测我下一步的问题。
+你需要生成 3 个可能的问题,引导我继续提问,生成的问题要求:
+1. 生成问题的语言,与最后一个用户提问语言一致。
+2. 问题的长度应小于20个字符。
+3. 按 JSON 格式返回: ["question1", "question2", "question3"]。`;

 export async function createQuestionGuide({
   messages,

View File

@@ -34,7 +34,7 @@ export async function splitCombinePluginId(id: string) {
   return { source, pluginId: id };
 }

-const getPluginTemplateById = async (
+const getChildAppTemplateById = async (
   id: string
 ): Promise<SystemPluginTemplateItemType & { teamId?: string }> => {
   const { source, pluginId } = await splitCombinePluginId(id);
@@ -69,44 +69,48 @@ const getPluginTemplateById = async (
 };

 /* format plugin modules to plugin preview module */
-export async function getPluginPreviewNode({ id }: { id: string }): Promise<FlowNodeTemplateType> {
-  const plugin = await getPluginTemplateById(id);
-  const isPlugin = !!plugin.workflow.nodes.find(
+export async function getChildAppPreviewNode({
+  id
+}: {
+  id: string;
+}): Promise<FlowNodeTemplateType> {
+  const app = await getChildAppTemplateById(id);
+  const isPlugin = !!app.workflow.nodes.find(
     (node) => node.flowNodeType === FlowNodeTypeEnum.pluginInput
   );

   return {
     id: getNanoid(),
-    pluginId: plugin.id,
-    templateType: plugin.templateType,
+    pluginId: app.id,
+    templateType: app.templateType,
     flowNodeType: isPlugin ? FlowNodeTypeEnum.pluginModule : FlowNodeTypeEnum.appModule,
-    avatar: plugin.avatar,
-    name: plugin.name,
-    intro: plugin.intro,
-    inputExplanationUrl: plugin.inputExplanationUrl,
-    showStatus: plugin.showStatus,
+    avatar: app.avatar,
+    name: app.name,
+    intro: app.intro,
+    inputExplanationUrl: app.inputExplanationUrl,
+    showStatus: app.showStatus,
     isTool: isPlugin,
-    version: plugin.version,
+    version: app.version,
     sourceHandle: getHandleConfig(true, true, true, true),
     targetHandle: getHandleConfig(true, true, true, true),
     ...(isPlugin
-      ? pluginData2FlowNodeIO({ nodes: plugin.workflow.nodes })
-      : appData2FlowNodeIO({ chatConfig: plugin.workflow.chatConfig }))
+      ? pluginData2FlowNodeIO({ nodes: app.workflow.nodes })
+      : appData2FlowNodeIO({ chatConfig: app.workflow.chatConfig }))
   };
 }

 /* run plugin time */
-export async function getPluginRuntimeById(id: string): Promise<PluginRuntimeType> {
-  const plugin = await getPluginTemplateById(id);
+export async function getChildAppRuntimeById(id: string): Promise<PluginRuntimeType> {
+  const app = await getChildAppTemplateById(id);

   return {
-    id: plugin.id,
-    teamId: plugin.teamId,
-    name: plugin.name,
-    avatar: plugin.avatar,
-    showStatus: plugin.showStatus,
-    currentCost: plugin.currentCost,
-    nodes: plugin.workflow.nodes,
-    edges: plugin.workflow.edges
+    id: app.id,
+    teamId: app.teamId,
+    name: app.name,
+    avatar: app.avatar,
+    showStatus: app.showStatus,
+    currentCost: app.currentCost,
+    nodes: app.workflow.nodes,
+    edges: app.workflow.edges
   };
 }

View File

@@ -13,6 +13,7 @@ export const computedPluginUsage = async (
 ) => {
   const { source } = await splitCombinePluginId(plugin.id);

+  // Commercial plugin: n points per times
   if (source === PluginSourceEnum.commercial) {
     return plugin.currentCost ?? 0;
   }

View File

@@ -2,7 +2,7 @@ import type { ModuleDispatchProps } from '@fastgpt/global/core/workflow/runtime/
 import { dispatchWorkFlow } from '../index';
 import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
 import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
-import { getPluginRuntimeById } from '../../../app/plugin/controller';
+import { getChildAppRuntimeById } from '../../../app/plugin/controller';
 import {
   getWorkflowEntryNodeIds,
   initWorkflowEdgeStatus,
@@ -16,8 +16,10 @@ import { filterSystemVariables } from '../utils';
 import { chatValue2RuntimePrompt } from '@fastgpt/global/core/chat/adapt';
 import { getPluginRunUserQuery } from '@fastgpt/global/core/workflow/utils';
 import { getPluginInputsFromStoreNodes } from '@fastgpt/global/core/app/plugin/utils';
+import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';

 type RunPluginProps = ModuleDispatchProps<{
+  [NodeInputKeyEnum.forbidStream]?: boolean;
   [key: string]: any;
 }>;
 type RunPluginResponse = DispatchNodeResultType<{}>;
@@ -26,9 +28,8 @@ export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPlugi
   const {
     node: { pluginId },
     runningAppInfo,
-    mode,
     query,
-    params: data // Plugin input
+    params: { system_forbid_stream = false, ...data } // Plugin input
   } = props;

   if (!pluginId) {
@@ -44,7 +45,7 @@ export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPlugi
     per: ReadPermissionVal
   });

-  const plugin = await getPluginRuntimeById(pluginId);
+  const plugin = await getChildAppRuntimeById(pluginId);

   const runtimeNodes = storeNodes2RuntimeNodes(
     plugin.nodes,
@@ -73,6 +74,13 @@ export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPlugi
   const { flowResponses, flowUsages, assistantResponses, runTimes } = await dispatchWorkFlow({
     ...props,
+    // Rewrite stream mode
+    ...(system_forbid_stream
+      ? {
+          stream: false,
+          workflowStreamResponse: undefined
+        }
+      : {}),
     runningAppInfo: {
       id: String(plugin.id),
       teamId: plugin.teamId || '',
@@ -95,11 +103,12 @@ export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPlugi
     output.moduleLogo = plugin.avatar;
   }

-  const isError = !!output?.pluginOutput?.error;
-  const usagePoints = isError ? 0 : await computedPluginUsage(plugin, flowUsages);
+  const usagePoints = await computedPluginUsage(plugin, flowUsages);
+  const childStreamResponse = system_forbid_stream ? false : props.stream;

   return {
-    assistantResponses,
+    // 嵌套运行时,如果 childApp stream=false,实际上不会有任何内容输出给用户,所以不需要存储
+    assistantResponses: childStreamResponse ? assistantResponses : [],
     // responseData, // debug
     [DispatchNodeResponseKeyEnum.runTimes]: runTimes,
     [DispatchNodeResponseKeyEnum.nodeResponse]: {
@@ -107,7 +116,7 @@ export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPlugi
       totalPoints: usagePoints,
       pluginOutput: output?.pluginOutput,
       pluginDetail:
-        mode === 'test' && plugin.teamId === runningAppInfo.teamId
+        pluginData && pluginData.permission.hasWritePer // Not system plugin
           ? flowResponses.filter((item) => {
               const filterArr = [FlowNodeTypeEnum.pluginOutput];
               return !filterArr.includes(item.moduleType as any);

View File

@@ -22,6 +22,7 @@ type Props = ModuleDispatchProps<{
   [NodeInputKeyEnum.userChatInput]: string;
   [NodeInputKeyEnum.history]?: ChatItemType[] | number;
   [NodeInputKeyEnum.fileUrlList]?: string[];
+  [NodeInputKeyEnum.forbidStream]?: boolean;
 }>;
 type Response = DispatchNodeResultType<{
   [NodeOutputKeyEnum.answerText]: string;
@@ -33,13 +34,14 @@ export const dispatchRunAppNode = async (props: Props): Promise<Response> => {
     runningAppInfo,
     histories,
     query,
-    mode,
     node: { pluginId },
     workflowStreamResponse,
     params,
     variables
   } = props;

-  const { userChatInput, history, ...childrenAppVariables } = params;
+  const { system_forbid_stream = false, userChatInput, history, ...childrenAppVariables } = params;
   if (!userChatInput) {
     return Promise.reject('Input is empty');
   }
@@ -54,14 +56,17 @@ export const dispatchRunAppNode = async (props: Props): Promise<Response> => {
     per: ReadPermissionVal
   });

   const { nodes, edges, chatConfig } = await getAppLatestVersion(pluginId);
+  const childStreamResponse = system_forbid_stream ? false : props.stream;

   // Auto line
-  workflowStreamResponse?.({
-    event: SseResponseEventEnum.answer,
-    data: textAdaptGptResponse({
-      text: '\n'
-    })
-  });
+  if (childStreamResponse) {
+    workflowStreamResponse?.({
+      event: SseResponseEventEnum.answer,
+      data: textAdaptGptResponse({
+        text: '\n'
+      })
+    });
+  }

   const chatHistories = getHistories(history, histories);
   const { files } = chatValue2RuntimePrompt(query);
@@ -77,6 +82,13 @@ export const dispatchRunAppNode = async (props: Props): Promise<Response> => {
   const { flowResponses, flowUsages, assistantResponses, runTimes } = await dispatchWorkFlow({
     ...props,
+    // Rewrite stream mode
+    ...(system_forbid_stream
+      ? {
+          stream: false,
+          workflowStreamResponse: undefined
+        }
+      : {}),
     runningAppInfo: {
       id: String(appData._id),
       teamId: String(appData.teamId),
@@ -106,21 +118,26 @@ export const dispatchRunAppNode = async (props: Props): Promise<Response> => {
   const { text } = chatValue2RuntimePrompt(assistantResponses);
+  const usagePoints = flowUsages.reduce((sum, item) => sum + (item.totalPoints || 0), 0);

   return {
-    assistantResponses,
+    assistantResponses: childStreamResponse ? assistantResponses : [],
     [DispatchNodeResponseKeyEnum.runTimes]: runTimes,
     [DispatchNodeResponseKeyEnum.nodeResponse]: {
       moduleLogo: appData.avatar,
+      totalPoints: usagePoints,
       query: userChatInput,
       textOutput: text,
-      totalPoints: flowResponses.reduce((sum, item) => sum + (item.totalPoints || 0), 0)
+      pluginDetail: appData.permission.hasWritePer ? flowResponses : undefined
     },
     [DispatchNodeResponseKeyEnum.nodeDispatchUsages]: [
       {
         moduleName: appData.name,
-        totalPoints: flowUsages.reduce((sum, item) => sum + (item.totalPoints || 0), 0)
+        totalPoints: usagePoints,
+        tokens: 0
       }
     ],
+    [DispatchNodeResponseKeyEnum.toolResponses]: text,
     answerText: text,
     history: completeMessages
   };
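
Both child-app dispatchers above (`dispatchRunPlugin` and `dispatchRunAppNode`) rewrite their dispatch props the same way when the new `system_forbid_stream` input is switched on. A minimal standalone sketch of that pattern, with the prop type simplified for illustration rather than taken from FastGPT's real signatures:

type ChildDispatchProps = {
  stream: boolean;
  workflowStreamResponse?: (e: { event: string; data: unknown }) => void;
  params: { system_forbid_stream?: boolean; [key: string]: any };
};

// When the parent node forbids streaming, the child workflow runs silently:
// stream is forced off, no SSE writer is passed down, and (as in the diff)
// the child's assistantResponses are dropped because nothing reached the user.
const rewriteStreamMode = (props: ChildDispatchProps): ChildDispatchProps => ({
  ...props,
  ...(props.params.system_forbid_stream
    ? { stream: false, workflowStreamResponse: undefined }
    : {})
});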

View File

@@ -12,17 +12,18 @@ import {
 import { responseWrite } from '../../../common/response';
 import { NextApiResponse } from 'next';
 import { SseResponseEventEnum } from '@fastgpt/global/core/workflow/runtime/constants';
+import { getNanoid } from '@fastgpt/global/common/string/tools';

 export const getWorkflowResponseWrite = ({
   res,
   detail,
   streamResponse,
-  id
+  id = getNanoid(24)
 }: {
   res?: NextApiResponse;
   detail: boolean;
   streamResponse: boolean;
-  id: string;
+  id?: string;
 }) => {
   return ({
     write,

View File

@@ -6,7 +6,7 @@
  *
  */
-import { useState, useRef, useTransition } from 'react';
+import { useState, useRef, useTransition, useCallback } from 'react';
 import { LexicalComposer } from '@lexical/react/LexicalComposer';
 import { PlainTextPlugin } from '@lexical/react/LexicalPlainTextPlugin';
 import { ContentEditable } from '@lexical/react/LexicalContentEditable';
@@ -75,7 +75,7 @@ export default function Editor({
   };

   const initialY = useRef(0);
-  const handleMouseDown = (e: React.MouseEvent) => {
+  const handleMouseDown = useCallback((e: React.MouseEvent) => {
     initialY.current = e.clientY;

     const handleMouseMove = (e: MouseEvent) => {
@@ -91,7 +91,7 @@ export default function Editor({
     document.addEventListener('mousemove', handleMouseMove);
     document.addEventListener('mouseup', handleMouseUp);
-  };
+  }, []);

   useDeepCompareEffect(() => {
     if (focus) return;
@@ -153,8 +153,8 @@ export default function Editor({
         }}
       />
       <VariableLabelPlugin variables={variableLabels} />
+      <VariableLabelPickerPlugin variables={variableLabels} isFocus={focus} />
       <VariablePlugin variables={variables} />
-      <VariableLabelPickerPlugin variables={variableLabels} isFocus={focus} />
       <VariablePickerPlugin variables={variableLabels.length > 0 ? [] : variables} />
       <OnBlurPlugin onBlur={onBlur} />
     </LexicalComposer>

View File

@@ -92,13 +92,13 @@
   "loop_start_tip": "Not input array",
   "max_dialog_rounds": "Maximum Number of Dialog Rounds",
   "max_tokens": "Maximum Tokens",
-  "mouse_priority": "Mouse first",
+  "mouse_priority": "Mouse first\n- Press the left button to drag the canvas\n- Hold down shift and left click to select batches",
   "new_context": "New Context",
   "not_contains": "Does Not Contain",
   "only_the_reference_type_is_supported": "Only reference type is supported",
   "optional_value_type": "Optional Value Type",
   "optional_value_type_tip": "You can specify one or more data types. When dynamically adding fields, users can only select the configured types.",
-  "pan_priority": "Touchpad first",
+  "pan_priority": "Touchpad first\n- Click to batch select\n- Move the canvas with two fingers",
   "pass_returned_object_as_output_to_next_nodes": "Pass the object returned in the code as output to the next nodes. The variable name needs to correspond to the return key.",
   "plugin.Instruction_Tip": "You can configure an instruction to explain the purpose of the plugin. This instruction will be displayed each time the plugin is used. Supports standard Markdown syntax.",
   "plugin.Instructions": "Instructions",
@@ -130,12 +130,14 @@
   "template.ai_chat_intro": "AI Large Model Chat",
   "template.dataset_search": "Dataset Search",
   "template.dataset_search_intro": "Use 'semantic search' and 'full-text search' capabilities to find potentially relevant reference content from the 'Dataset'.",
+  "template.forbid_stream": "Forbid stream mode",
+  "template.forbid_stream_desc": "Forces the output mode of nested application streams to be disabled",
+  "template.plugin_output": "Plugin output",
+  "template.plugin_start": "Plugin start",
   "template.system_config": "System Configuration",
   "template.tool_call": "Tool Call",
   "template.tool_call_intro": "Automatically select one or more functional blocks for calling through the AI model, or call plugins.",
   "template.workflow_start": "Workflow Start",
-  "template.plugin_output": "Plugin output",
-  "template.plugin_start": "Plugin start",
   "text_concatenation": "Text Concatenation",
   "text_content_extraction": "Text Content Extraction",
   "text_to_extract": "Text to Extract",
@@ -154,4 +156,4 @@
   "workflow.Switch_success": "Switch Successful",
   "workflow.Team cloud": "Team Cloud",
   "workflow.exit_tips": "Your changes have not been saved. 'Exit directly' will not save your edits."
-}
\ No newline at end of file
+}

View File

@@ -247,7 +247,7 @@
   "core.ai.Not deploy rerank model": "未部署重排模型",
   "core.ai.Prompt": "提示词",
   "core.ai.Support tool": "函数调用",
-  "core.ai.model.Dataset Agent Model": "文件处理模型",
+  "core.ai.model.Dataset Agent Model": "文本理解模型",
   "core.ai.model.Vector Model": "索引模型",
   "core.ai.model.doc_index_and_dialog": "文档索引 & 对话索引",
   "core.app.Ai response": "返回 AI 内容",
@@ -528,7 +528,7 @@
   "core.dataset.externalFile": "外部文件库",
   "core.dataset.file": "文件",
   "core.dataset.folder": "目录",
-  "core.dataset.import.Auto mode Estimated Price Tips": "需调用文件处理模型,需要消耗较多 tokens:{{price}} 积分/1K tokens",
+  "core.dataset.import.Auto mode Estimated Price Tips": "需调用文本理解模型,需要消耗较多 tokens:{{price}} 积分/1K tokens",
   "core.dataset.import.Auto process": "自动",
   "core.dataset.import.Auto process desc": "自动设置分割和预处理规则",
   "core.dataset.import.Chunk Range": "范围:{{min}}~{{max}}",
@@ -555,7 +555,7 @@
   "core.dataset.import.Preview chunks": "预览分段(最多 5 段)",
   "core.dataset.import.Preview raw text": "预览源文本(最多 3000 字)",
   "core.dataset.import.Process way": "处理方式",
-  "core.dataset.import.QA Estimated Price Tips": "需调用文件处理模型,需要消耗较多 AI 积分:{{price}} 积分/1K tokens",
+  "core.dataset.import.QA Estimated Price Tips": "需调用文本理解模型,需要消耗较多 AI 积分:{{price}} 积分/1K tokens",
   "core.dataset.import.QA Import": "QA 拆分",
   "core.dataset.import.QA Import Tip": "根据一定规则,将文本拆成一段较大的段落,调用 AI 为该段落生成问答对。有非常高的检索精度,但是会丢失很多内容细节。",
   "core.dataset.import.Select file": "选择文件",
@@ -1187,4 +1187,4 @@
   "verification": "验证",
   "xx_search_result": "{{key}} 的搜索结果",
   "yes": "是"
-}
\ No newline at end of file
+}

View File

@@ -98,13 +98,13 @@
   "loop_start_tip": "未输入数组",
   "max_dialog_rounds": "最多携带多少轮对话记录",
   "max_tokens": "最大 Tokens",
-  "mouse_priority": "鼠标优先",
+  "mouse_priority": "鼠标优先\n- 左键按下后可拖动画布\n- 按住 shift 后左键可批量选择",
   "new_context": "新的上下文",
   "not_contains": "不包含",
   "only_the_reference_type_is_supported": "仅支持引用类型",
   "optional_value_type": "可选的数据类型",
   "optional_value_type_tip": "可以指定 1 个或多个数据类型,用户在动态添加字段时,仅可选择配置的类型",
-  "pan_priority": "触摸板优先",
+  "pan_priority": "触摸板优先\n- 单击批量选择\n- 双指移动画布",
   "pass_returned_object_as_output_to_next_nodes": "将代码中 return 的对象作为输出,传递给后续的节点。变量名需要对应 return 的 key",
   "plugin.Instruction_Tip": "可以配置一段说明,以解释该插件的用途。每次使用插件前,会显示该段说明。支持标准 Markdown 语法。",
   "plugin.Instructions": "使用说明",
@@ -136,12 +136,14 @@
   "template.ai_chat_intro": "AI 大模型对话",
   "template.dataset_search": "知识库搜索",
   "template.dataset_search_intro": "调用“语义检索”和“全文检索”能力,从“知识库”中查找可能与问题相关的参考内容",
+  "template.forbid_stream": "禁用流输出",
+  "template.forbid_stream_desc": "强制设置嵌套运行的应用,均以非流模式运行",
+  "template.plugin_output": "插件输出",
+  "template.plugin_start": "插件开始",
   "template.system_config": "系统配置",
   "template.tool_call": "工具调用",
   "template.tool_call_intro": "通过AI模型自动选择一个或多个功能块进行调用,也可以对插件进行调用。",
   "template.workflow_start": "流程开始",
-  "template.plugin_output": "插件输出",
-  "template.plugin_start": "插件开始",
   "text_concatenation": "文本拼接",
   "text_content_extraction": "文本内容提取",
   "text_to_extract": "需要提取的文本",
@@ -160,4 +162,4 @@
   "workflow.Switch_success": "切换成功",
   "workflow.Team cloud": "团队云端",
   "workflow.exit_tips": "您的更改尚未保存,「直接退出」将不会保存您的编辑记录。"
-}
\ No newline at end of file
+}

View File

@@ -20,7 +20,7 @@
      "charsPointsPrice": 0, // n积分/1k token(商业版)
      "censor": false, // 是否开启敏感校验(商业版)
      "vision": true, // 是否支持图片输入
-      "datasetProcess": true, // 是否设置为知识库处理模型(QA),务必保证至少有一个为true,否则知识库会报错
+      "datasetProcess": true, // 是否设置为文本理解模型(QA),务必保证至少有一个为true,否则知识库会报错
      "usedInClassify": true, // 是否用于问题分类(务必保证至少有一个为true)
      "usedInExtractFields": true, // 是否用于内容提取(务必保证至少有一个为true)
      "usedInToolCall": true, // 是否用于工具调用(务必保证至少有一个为true)
@@ -44,7 +44,7 @@
      "charsPointsPrice": 0,
      "censor": false,
      "vision": true,
-      "datasetProcess": false,
+      "datasetProcess": true,
      "usedInClassify": true,
      "usedInExtractFields": true,
      "usedInToolCall": true,
@@ -68,7 +68,7 @@
      "charsPointsPrice": 0,
      "censor": false,
      "vision": false,
-      "datasetProcess": false,
+      "datasetProcess": true,
      "usedInClassify": true,
      "usedInExtractFields": true,
      "usedInToolCall": true,
@@ -97,7 +97,7 @@
      "charsPointsPrice": 0,
      "censor": false,
      "vision": false,
-      "datasetProcess": false,
+      "datasetProcess": true,
      "usedInClassify": true,
      "usedInExtractFields": true,
      "usedInToolCall": true,

View File

@@ -56,6 +56,7 @@ const Layout = ({ children }: { children: JSX.Element }) => {
     [router.pathname, router.query]
   );

+  // System hook
   const { data, refetch: refetchUnRead } = useQuery(['getUnreadCount'], getUnreadCount, {
     enabled: !!userInfo && !!feConfigs.isPlus,
     refetchInterval: 10000

View File

@@ -11,11 +11,25 @@ import { useInitApp } from '@/web/context/useInitApp';
 import { useTranslation } from 'next-i18next';
 import '@/web/styles/reset.scss';
 import NextHead from '@/components/common/NextHead';
+import { useEffect } from 'react';

 function App({ Component, pageProps }: AppProps) {
   const { feConfigs, scripts, title } = useInitApp();
   const { t } = useTranslation();

+  // Forbid touch scale
+  useEffect(() => {
+    document.addEventListener(
+      'wheel',
+      function (e) {
+        if (e.ctrlKey && Math.abs(e.deltaY) !== 0) {
+          e.preventDefault();
+        }
+      },
+      { passive: false }
+    );
+  }, []);
+
   return (
     <>
       <NextHead
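
The listener above blocks ctrl + wheel, which browsers also use to report trackpad pinch gestures (they set `ctrlKey` on the wheel event), so the app no longer zooms on pinch. `{ passive: false }` is required because document-level wheel listeners are passive by default and `preventDefault()` would otherwise be ignored. An equivalent sketch as a hypothetical standalone hook that also removes the listener on unmount (the version in this commit registers once for the app's lifetime and skips the cleanup):

import { useEffect } from 'react';

const useForbidTouchScale = () => {
  useEffect(() => {
    const onWheel = (e: WheelEvent) => {
      // ctrl + wheel / trackpad pinch zooms the page; block it inside the app
      if (e.ctrlKey && Math.abs(e.deltaY) !== 0) {
        e.preventDefault();
      }
    };
    // passive: false so preventDefault() is honored for wheel events
    document.addEventListener('wheel', onWheel, { passive: false });
    return () => document.removeEventListener('wheel', onWheel);
  }, []);
};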

View File

@@ -3,7 +3,7 @@
  */
 import type { NextApiResponse } from 'next';
 import {
-  getPluginPreviewNode,
+  getChildAppPreviewNode,
   splitCombinePluginId
 } from '@fastgpt/service/core/app/plugin/controller';
 import { FlowNodeTemplateType } from '@fastgpt/global/core/workflow/type/node.d';
@@ -27,7 +27,7 @@ async function handler(
     await authApp({ req, authToken: true, appId, per: ReadPermissionVal });
   }

-  return getPluginPreviewNode({ id: appId });
+  return getChildAppPreviewNode({ id: appId });
 }

 export default NextAPI(handler);

View File

@@ -116,8 +116,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
   const workflowResponseWrite = getWorkflowResponseWrite({
     res,
     detail: true,
-    streamResponse: true,
-    id: getNanoid(24)
+    streamResponse: true
   });

   /* start process */

View File

@@ -240,7 +240,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
     res,
     detail,
     streamResponse: stream,
-    id: chatId || getNanoid(24)
+    id: chatId
   });

   /* start flow controller */

View File

@@ -262,7 +262,8 @@ const Header = () => {
           await onClickSave({});
           toast({
             status: 'success',
-            title: t('app:saved_success')
+            title: t('app:saved_success'),
+            position: 'top-right'
           });
           onClose();
           setIsSave(false);
@@ -332,7 +333,8 @@ const Header = () => {
             onBack();
             toast({
               status: 'success',
-              title: t('app:saved_success')
+              title: t('app:saved_success'),
+              position: 'top-right'
             });
           }}
         >

View File

@@ -85,7 +85,8 @@ const Header = ({
       });
       toast({
         status: 'success',
-        title: t('app:publish_success')
+        title: t('app:publish_success'),
+        position: 'top-right'
       });
     },
     [onSaveApp, t, toast]

View File

@@ -264,7 +264,8 @@ const Header = () => {
           await onClickSave({});
           toast({
             status: 'success',
-            title: t('app:saved_success')
+            title: t('app:saved_success'),
+            position: 'top-right'
           });
           onClose();
           setIsSave(false);
@@ -334,7 +335,8 @@ const Header = () => {
             onBack();
             toast({
               status: 'success',
-              title: t('app:saved_success')
+              title: t('app:saved_success'),
+              position: 'top-right'
             });
           }}
         >

View File

@@ -67,7 +67,8 @@ const SaveAndPublishModal = ({
         await onClickSave({ ...data, isPublish: true });
         toast({
           status: 'success',
-          title: t('app:publish_success')
+          title: t('app:publish_success'),
+          position: 'top-right'
         });
         onClose();
       })}

View File

@@ -89,6 +89,7 @@ const InputLabel = ({ nodeId, input }: Props) => {
     required,
     selectedTypeIndex,
     t,
+    valueDesc,
     valueType
   ]);

View File

@@ -344,7 +344,7 @@ const WorkflowContextProvider = ({
   const [workflowControlMode, setWorkflowControlMode] = useLocalStorageState<'drag' | 'select'>(
     'workflow-control-mode',
     {
-      defaultValue: 'select',
+      defaultValue: 'drag',
       listenStorageChange: true
     }
   );
@@ -782,10 +782,12 @@ const WorkflowContextProvider = ({
   /* snapshots */
   const [past, setPast] = useLocalStorageState<SnapshotsType[]>(`${appId}-past`, {
-    defaultValue: []
+    defaultValue: [],
+    listenStorageChange: true
   }) as [SnapshotsType[], (value: SetStateAction<SnapshotsType[]>) => void];

   const [future, setFuture] = useLocalStorageState<SnapshotsType[]>(`${appId}-future`, {
-    defaultValue: []
+    defaultValue: [],
+    listenStorageChange: true
   }) as [SnapshotsType[], (value: SetStateAction<SnapshotsType[]>) => void];

   const resetSnapshot = useMemoizedFn((state: SnapshotsType) => {

View File

@@ -25,10 +25,17 @@ export const uiWorkflow2StoreWorkflow = ({
     version: item.data.version,
     inputs: item.data.inputs,
     outputs: item.data.outputs,
-    pluginId: item.data.pluginId,
-    parentNodeId: item.data.parentNodeId
+    pluginId: item.data.pluginId
   }));

+  // get all handle
+  const reactFlowViewport = document.querySelector('.react-flow__viewport');
+  // Gets the value of data-handleid on all elements below it whose data-handleid is not empty
+  const handleList =
+    reactFlowViewport?.querySelectorAll('[data-handleid]:not([data-handleid=""])') || [];
+  const handleIdList = Array.from(handleList).map(
+    (item) => item.getAttribute('data-handleid') || ''
+  );
+
   const formatEdges: StoreEdgeItemType[] = edges
     .map((item) => ({
       source: item.source,
@@ -36,7 +43,15 @@ export const uiWorkflow2StoreWorkflow = ({
       sourceHandle: item.sourceHandle || '',
       targetHandle: item.targetHandle || ''
     }))
-    .filter((item) => item.sourceHandle && item.targetHandle);
+    .filter((item) => item.sourceHandle && item.targetHandle)
+    .filter(
+      // Filter out edges that do not have both sourceHandle and targetHandle
+      (item) => {
+        // Not in react flow page
+        if (!reactFlowViewport) return true;
+        return handleIdList.includes(item.sourceHandle) && handleIdList.includes(item.targetHandle);
+      }
+    );

   return {
     nodes: formatNodes,
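
This is the "check for invalid workflow edges on save" change: before serializing, the handle ids currently rendered by React Flow are read from the DOM, and any edge whose source or target handle no longer exists is dropped; when the canvas is not mounted, edges are left untouched. A condensed sketch of the same check, with the selector taken from the diff and the helper name invented for illustration:

const isEdgeStillValid = (edge: { sourceHandle: string; targetHandle: string }): boolean => {
  const viewport = document.querySelector('.react-flow__viewport');
  // Not on the flow editor page: nothing to validate against, keep the edge
  if (!viewport) return true;
  const liveHandleIds = Array.from(
    viewport.querySelectorAll('[data-handleid]:not([data-handleid=""])')
  ).map((el) => el.getAttribute('data-handleid') || '');
  return liveHandleIds.includes(edge.sourceHandle) && liveHandleIds.includes(edge.targetHandle);
};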

View File

@@ -113,7 +113,7 @@ const Chat = ({
       if (e?.code === 501) {
         router.replace('/app/list');
       } else if (chatId) {
-        onChangeChatId('');
+        onChangeChatId();
       }
     },
     onFinally() {

View File

@@ -2,12 +2,13 @@ import React, { useCallback, useMemo, useRef, useState } from 'react';
 import { useRouter } from 'next/router';
 import { Box, Flex, Drawer, DrawerOverlay, DrawerContent } from '@chakra-ui/react';
 import { streamFetch } from '@/web/common/api/fetch';
-import { useShareChatStore } from '@/web/core/chat/storeShareChat';
 import SideBar from '@/components/SideBar';
 import { GPTMessages2Chats } from '@fastgpt/global/core/chat/adapt';
 import { customAlphabet } from 'nanoid';
-const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 12);
+const nanoid = customAlphabet(
+  'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWSYZ1234567890_',
+  24
+);

 import ChatBox from '@/components/core/chat/ChatContainer/ChatBox';
 import type { StartChatFnProps } from '@/components/core/chat/ChatContainer/type';
@@ -16,7 +17,7 @@ import ChatHeader from './components/ChatHeader';
 import ChatHistorySlider from './components/ChatHistorySlider';
 import { serviceSideProps } from '@/web/common/utils/i18n';
 import { useTranslation } from 'next-i18next';
-import { delChatRecordById, getChatHistories, getInitOutLinkChatInfo } from '@/web/core/chat/api';
+import { delChatRecordById, getInitOutLinkChatInfo } from '@/web/core/chat/api';
 import { getChatTitleFromChatMessage } from '@fastgpt/global/core/chat/utils';
 import { ChatStatusEnum } from '@fastgpt/global/core/chat/constants';
 import { MongoOutLink } from '@fastgpt/service/support/outLink/schema';
@@ -36,6 +37,7 @@ import { getNanoid } from '@fastgpt/global/common/string/tools';
 import dynamic from 'next/dynamic';
 import { useSystem } from '@fastgpt/web/hooks/useSystem';
+import { useShareChatStore } from '@/web/core/chat/storeShareChat';

 const CustomPluginRunBox = dynamic(() => import('./components/CustomPluginRunBox'));

 type Props = {
@@ -46,7 +48,14 @@ type Props = {
   authToken: string;
 };

-const OutLink = ({ appName, appIntro, appAvatar }: Props) => {
+const OutLink = ({
+  outLinkUid,
+  appName,
+  appIntro,
+  appAvatar
+}: Props & {
+  outLinkUid: string;
+}) => {
   const { t } = useTranslation();
   const router = useRouter();
   const {
@@ -69,14 +78,9 @@ const OutLink = ({ appName, appIntro, appAvatar }: Props) => {
   const [isEmbed, setIdEmbed] = useState(true);
   const [chatData, setChatData] = useState<InitChatResponse>(defaultChatData);
   const appId = chatData.appId;

-  const { localUId } = useShareChatStore();
-  const outLinkUid: string = authToken || localUId;
-
   const {
     onUpdateHistoryTitle,
-    loadHistories,
     onUpdateHistory,
     onClearHistories,
     onDelHistory,
@@ -212,7 +216,7 @@ const OutLink = ({ appName, appIntro, appAvatar }: Props) => {
       onError(e: any) {
         console.log(e);
         if (chatId) {
-          onChangeChatId('');
+          onChangeChatId();
         }
       },
       onFinally() {
@@ -352,16 +356,21 @@ const OutLink = ({ appName, appIntro, appAvatar }: Props) => {
 const Render = (props: Props) => {
   const { shareId, authToken } = props;
-  const { localUId } = useShareChatStore();
-  const outLinkUid: string = authToken || localUId;
+  const { localUId, setLocalUId } = useShareChatStore();

   const contextParams = useMemo(() => {
-    return { shareId, outLinkUid };
-  }, [shareId, outLinkUid]);
+    if (!localUId) {
+      const localId = `shareChat-${Date.now()}-${nanoid()}`;
+      setLocalUId(localId);
+      return { shareId, outLinkUid: authToken || localId };
+    }
+    return { shareId, outLinkUid: authToken || localUId };
+  }, []);

   return (
     <ChatContextProvider params={contextParams}>
-      <OutLink {...props} />;
+      <OutLink {...props} outLinkUid={contextParams.outLinkUid} />;
     </ChatContextProvider>
   );
 };

View File

@@ -166,7 +166,7 @@ const Chat = ({ myApps }: { myApps: AppListItemType[] }) => {
         status: 'error'
       });
       if (chatId) {
-        onChangeChatId('');
+        onChangeChatId();
       }
     },
     onFinally() {

View File

@@ -287,9 +287,9 @@ function List() {
             <HStack>
               {isPc && (
                 <HStack spacing={1} className="time">
-                  <MyIcon name={'history'} w={'0.85rem'} color={'myGray.400'} />
+                  <Avatar src={dataset.vectorModel.avatar} w={'0.85rem'} />
                   <Box color={'myGray.500'} fontSize={'mini'}>
-                    {formatTimeToChatTime(dataset.updateTime)}
+                    {dataset.vectorModel.name}
                   </Box>
                 </HStack>
               )}

View File

@@ -195,7 +195,7 @@ const ChatContextProvider = ({
         setHistories([]);
       },
       onFinally() {
-        onChangeChatId('');
+        onChangeChatId();
       }
     }
   );

View File

@@ -1,22 +1,22 @@
 import { create } from 'zustand';
 import { devtools, persist } from 'zustand/middleware';
 import { immer } from 'zustand/middleware/immer';
-import type { ChatHistoryItemType } from '@fastgpt/global/core/chat/type.d';
-import { customAlphabet } from 'nanoid';
-const nanoid = customAlphabet(
-  'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWSYZ1234567890_',
-  24
-);

 type State = {
   localUId: string;
+  setLocalUId: (id: string) => void;
 };

 export const useShareChatStore = create<State>()(
   devtools(
     persist(
       immer((set, get) => ({
-        localUId: `shareChat-${Date.now()}-${nanoid()}`
+        localUId: '',
+        setLocalUId(id) {
+          set((state) => {
+            state.localUId = id;
+          });
+        }
       })),
       {
         name: 'shareChatStore'

View File

@@ -90,7 +90,7 @@ export const useUserStore = create<State>()(
       if (!useSystemStore.getState()?.feConfigs?.isPlus) return [];

       const randomRefresh = Math.random() > 0.7;
-      if (!randomRefresh && !init && get().teamMembers.length)
+      if (!randomRefresh && !init && get().teamMembers?.length)
         return Promise.resolve(get().teamMembers);

       const res = await getTeamMembers();