FastGPT/packages/service/core/app/controller.ts
Archer fb368a581c Perf input guide (#1557), 2024-05-21 17:52:04 +08:00

* perf: input guide code
* perf: input guide ui
* Chat input guide api
* Update app chat config store
* perf: app chat config field
* perf: app context
* perf: params
* fix: ts
* perf: filter private config
* perf: filter private config
* perf: import workflow
* perf: limit max tip amount

import { AppSchema } from '@fastgpt/global/core/app/type';
import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
import { getLLMModel } from '../ai/model';
import { MongoAppVersion } from './version/schema';

/* Clamp every dataset search node's max-token limit to the largest quote window
   among the LLM models referenced by the workflow's chat/tool nodes. */
export const beforeUpdateAppFormat = <T extends AppSchema['modules'] | undefined>({
  nodes
}: {
  nodes: T;
}) => {
  if (nodes) {
    let maxTokens = 3000;

    // Find the largest quoteMaxToken among the models used by chat and tool nodes.
    nodes.forEach((item) => {
      if (
        item.flowNodeType === FlowNodeTypeEnum.chatNode ||
        item.flowNodeType === FlowNodeTypeEnum.tools
      ) {
        const model =
          item.inputs.find((item) => item.key === NodeInputKeyEnum.aiModel)?.value || '';
        const chatModel = getLLMModel(model);
        const quoteMaxToken = chatModel.quoteMaxToken || 3000;

        maxTokens = Math.max(maxTokens, quoteMaxToken);
      }
    });

    // Cap datasetMaxTokens on dataset search nodes so it never exceeds that window.
    nodes.forEach((item) => {
      if (item.flowNodeType === FlowNodeTypeEnum.datasetSearchNode) {
        item.inputs.forEach((input) => {
          if (input.key === NodeInputKeyEnum.datasetMaxTokens) {
            const val = input.value as number;
            if (val > maxTokens) {
              input.value = maxTokens;
            }
          }
        });
      }
    });
  }

  return {
    nodes
  };
};
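
/*
  Usage sketch (not part of this module; the caller-side names are assumptions):
  a save/update handler could run the formatter over an edited workflow before
  persisting it, so any user-entered datasetMaxTokens value is clamped to the
  models' quote window.

    const { nodes } = beforeUpdateAppFormat({ nodes: req.body.nodes });
    await MongoApp.updateOne({ _id: appId }, { modules: nodes });

  `req.body.nodes` and the MongoApp update call describe a hypothetical caller,
  not anything defined in this file.
*/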

/* Return the most recently saved version of an app's workflow; fall back to the
   draft stored on the app document when no published version exists. */
export const getAppLatestVersion = async (appId: string, app?: AppSchema) => {
  const version = await MongoAppVersion.findOne({
    appId
  }).sort({
    time: -1
  });

  if (version) {
    return {
      nodes: version.nodes,
      edges: version.edges,
      chatConfig: version.chatConfig || app?.chatConfig || {}
    };
  }
  return {
    nodes: app?.modules || [],
    edges: app?.edges || [],
    chatConfig: app?.chatConfig || {}
  };
};
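
/*
  Usage sketch (not part of this module): a chat entry point could resolve the
  runnable workflow like this, preferring the latest published version and
  falling back to the draft kept on the app document.

    const app = await MongoApp.findById(appId);
    if (!app) throw new Error('app not found');
    const { nodes, edges, chatConfig } = await getAppLatestVersion(appId, app);

  The MongoApp lookup is an assumed caller-side step; only getAppLatestVersion
  is defined in this file.
*/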