FastGPT/packages/service/core/app/utils.ts
Archer 13b7e0a192 V4.11.0 features (#5270)
* feat: workflow catch error (#5220)

* feat: error catch

* feat: workflow catch error

* perf: add catch error to node

* feat: system tool error catch

* catch error

* fix: ts

* update doc

* perf: training queue code (#5232)

* doc

* perf: training queue code

* Feat: improve error messages and retry logic (#5192)

* feat: batch retry for failed data & i18n for error messages

  - Add a "Retry all" button to batch-retry all failed training data
  - Error messages support i18n; common errors are automatically mapped to i18n keys
  - Related docs and i18n resources updated in sync

* feat: enhance error message and retry mechanism

* feat: enhance error message and retry mechanism

* feat: add retry_failed i18n key

* feat: enhance error message and retry mechanism

* feat: enhance error message and retry mechanism

* feat: enhance error message and retry mechanism : 5

* feat: enhance error message and retry mechanism : 6

* feat: enhance error message and retry mechanism : 7

* feat: enhance error message and retry mechanism : 8

* perf: catch chat error

* perf: copy hook (#5246)

* perf: copy hook

* doc

* doc

* add app evaluation (#5083)

* add app evaluation

* fix

* usage

* variables

* editing condition

* var ui

* isplus filter

* migrate code

* remove utils

* name

* update type

* build

* fix

* fix

* fix

* delete comment

* fix

* perf: eval code

* eval code

* eval code

* feat: ttfb time in model log

* Refactor chat page (#5253)

* feat: update side bar layout; add login and logout logic at chat page

* refactor: encapsulate login logic and reuse it in `LoginModal` and `Login` page

* chore: improve some logics and comments

* chore: improve some logics

* chore: remove redundant side effect; add translations

---------

Co-authored-by: Archer <545436317@qq.com>

* perf: chat page code

* doc

* perf: provider redirect

* chore: ui improvement (#5266)

* Fix: SSE

* Fix: SSE

* eval pagination (#5264)

* eval scroll pagination

* change eval list to manual pagination

* number

* fix build

* fix

* version doc (#5267)

* version doc

* version doc

* doc

* feat: eval model select

* config eval model

* perf: eval detail modal ui

* doc

* doc

* fix: chat store reload

* doc

---------

Co-authored-by: colnii <1286949794@qq.com>
Co-authored-by: heheer <heheer@sealos.io>
Co-authored-by: 酒川户 <76519998+chuanhu9@users.noreply.github.com>
2025-07-22 09:42:50 +08:00

import { MongoDataset } from '../dataset/schema';
import { getEmbeddingModel } from '../ai/model';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import type { StoreNodeItemType } from '@fastgpt/global/core/workflow/type/node';
import { getChildAppPreviewNode, splitCombinePluginId } from './plugin/controller';
import { PluginSourceEnum } from '@fastgpt/global/core/app/plugin/constants';
import { authAppByTmbId } from '../../support/permission/app/auth';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { getErrText } from '@fastgpt/global/common/error/utils';

export async function listAppDatasetDataByTeamIdAndDatasetIds({
  teamId,
  datasetIdList
}: {
  teamId?: string;
  datasetIdList: string[];
}) {
  const myDatasets = await MongoDataset.find({
    _id: { $in: datasetIdList },
    ...(teamId && { teamId })
  }).lean();

  return myDatasets.map((item) => ({
    datasetId: String(item._id),
    avatar: item.avatar,
    name: item.name,
    vectorModel: getEmbeddingModel(item.vectorModel)
  }));
}

export async function rewriteAppWorkflowToDetail({
  nodes,
  teamId,
  isRoot,
  ownerTmbId
}: {
  nodes: StoreNodeItemType[];
  teamId: string;
  isRoot: boolean;
  ownerTmbId: string;
}) {
  const datasetIdSet = new Set<string>();

  /* Add node (App Type) version label and latest-version flag ==== */
  await Promise.all(
    nodes.map(async (node) => {
      if (!node.pluginId) return;
      const { source, pluginId } = splitCombinePluginId(node.pluginId);

      try {
        const [preview] = await Promise.all([
          getChildAppPreviewNode({
            appId: pluginId,
            versionId: node.version
          }),
          ...(source === PluginSourceEnum.personal
            ? [
                authAppByTmbId({
                  tmbId: ownerTmbId,
                  appId: pluginId,
                  per: ReadPermissionVal
                })
              ]
            : [])
        ]);

        node.pluginData = {
          diagram: preview.diagram,
          userGuide: preview.userGuide,
          courseUrl: preview.courseUrl,
          name: preview.name,
          avatar: preview.avatar
        };
        node.versionLabel = preview.versionLabel;
        node.isLatestVersion = preview.isLatestVersion;
        node.version = preview.version;
        node.currentCost = preview.currentCost;
        node.hasTokenFee = preview.hasTokenFee;
        node.hasSystemSecret = preview.hasSystemSecret;

        // Latest version
        if (!node.version) {
          const inputsMap = new Map(node.inputs.map((item) => [item.key, item]));
          const outputsMap = new Map(node.outputs.map((item) => [item.key, item]));

          node.inputs = preview.inputs.map((item) => {
            const input = inputsMap.get(item.key);
            return {
              ...item,
              value: input?.value,
              selectedTypeIndex: input?.selectedTypeIndex
            };
          });
          node.outputs = preview.outputs.map((item) => {
            const output = outputsMap.get(item.key);
            return {
              ...item,
              value: output?.value
            };
          });
        }
      } catch (error) {
        node.pluginData = {
          error: getErrText(error)
        };
      }
    })
  );

  // Get all dataset ids from nodes
  nodes.forEach((node) => {
    if (node.flowNodeType !== FlowNodeTypeEnum.datasetSearchNode) return;

    const input = node.inputs.find((item) => item.key === NodeInputKeyEnum.datasetSelectList);
    if (!input) return;

    const rawValue = input.value as undefined | { datasetId: string }[] | { datasetId: string };
    if (!rawValue) return;

    const datasetIds = Array.isArray(rawValue)
      ? rawValue.map((v) => v?.datasetId).filter((id) => !!id && typeof id === 'string')
      : rawValue?.datasetId
        ? [String(rawValue.datasetId)]
        : [];

    datasetIds.forEach((id) => datasetIdSet.add(id));
  });

  if (datasetIdSet.size === 0) return;

  // Load dataset list
  const datasetList = await listAppDatasetDataByTeamIdAndDatasetIds({
    teamId: isRoot ? undefined : teamId,
    datasetIdList: Array.from(datasetIdSet)
  });
  const datasetMap = new Map(datasetList.map((ds) => [String(ds.datasetId), ds]));

  // Rewrite dataset ids, add dataset info to nodes
  if (datasetList.length > 0) {
    nodes.forEach((node) => {
      if (node.flowNodeType !== FlowNodeTypeEnum.datasetSearchNode) return;

      node.inputs.forEach((item) => {
        if (item.key !== NodeInputKeyEnum.datasetSelectList) return;

        const val = item.value as undefined | { datasetId: string }[] | { datasetId: string };

        if (Array.isArray(val)) {
          item.value = val
            .map((v) => {
              const data = datasetMap.get(String(v.datasetId));
              if (!data)
                return {
                  datasetId: v.datasetId,
                  avatar: '',
                  name: 'Dataset not found',
                  vectorModel: ''
                };
              return {
                datasetId: data.datasetId,
                avatar: data.avatar,
                name: data.name,
                vectorModel: data.vectorModel
              };
            })
            .filter(Boolean);
        } else if (typeof val === 'object' && val !== null) {
          const data = datasetMap.get(String(val.datasetId));
          if (!data) {
            item.value = [
              {
                datasetId: val.datasetId,
                avatar: '',
                name: 'Dataset not found',
                vectorModel: ''
              }
            ];
          } else {
            item.value = [
              {
                datasetId: data.datasetId,
                avatar: data.avatar,
                name: data.name,
                vectorModel: data.vectorModel
              }
            ];
          }
        }
      });
    });
  }

  return nodes;
}
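
For reference, a minimal usage sketch of the two exported helpers. This is not part of utils.ts: the import path alias, the caller names (getAppDetailNodes, getSelectedDatasets), and the parameter objects are assumptions for illustration only; the call signatures match the functions defined above.

import type { StoreNodeItemType } from '@fastgpt/global/core/workflow/type/node';
import {
  listAppDatasetDataByTeamIdAndDatasetIds,
  rewriteAppWorkflowToDetail
} from '@fastgpt/service/core/app/utils'; // assumed alias for packages/service/core/app/utils

// Hypothetical caller: resolve an app's workflow nodes into their detail form
// before returning them to the client.
async function getAppDetailNodes(params: {
  nodes: StoreNodeItemType[];
  teamId: string;
  tmbId: string;
  isRoot: boolean;
}) {
  const { nodes, teamId, tmbId, isRoot } = params;

  // Mutates `nodes` in place: attaches plugin preview/version info and
  // replaces stored dataset ids with { datasetId, avatar, name, vectorModel }.
  await rewriteAppWorkflowToDetail({
    nodes,
    teamId,
    isRoot,
    ownerTmbId: tmbId
  });

  return nodes;
}

// Hypothetical caller: list dataset metadata for a known set of dataset ids,
// scoped to a team (omit teamId to skip the team filter, as the root branch above does).
async function getSelectedDatasets(teamId: string, datasetIdList: string[]) {
  return listAppDatasetDataByTeamIdAndDatasetIds({ teamId, datasetIdList });
}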