Mirror of https://github.com/labring/FastGPT.git (synced 2025-07-21 11:43:56 +00:00)
4.8.11 perf (#2832)

* save toast
* perf: surya ocr
* perf: remove same model name
* fix: indexes
* perf: ip check
* feat: Fixed the version number of the subapplication
* feat: simple app get latest child version
* perf: update child dispatch variables
* feat: variables update doc
@@ -95,17 +95,19 @@ weight: 813
 7. New - Support for OpenAI o1 models. Add a `defaultConfig` for the model to override `temperature`, `max_tokens`, and `stream`; o1 does not support stream mode. Pull the latest `config.json` for details.
 8. New - Dataset citations in the AI chat node can be sent as role=system or role=user. Nodes that already use a custom prompt keep user mode; the rest switch to system mode.
 9. New - Plugins support uploading system files.
-10. New - Plugin outputs can mark specific fields as the tool response.
-11. New - Workflows that nest child apps can enable `non-stream mode`; simple mode can also use a workflow as a plugin, and child apps invoked from simple mode always run in non-stream mode.
-12. New - In debug mode, child app calls return detailed run data.
-13. New - Logs of nested child app calls are kept in all modes.
-14. Improved - Workflow nesting is limited to 20 levels to avoid infinite loops caused by bad orchestration.
-15. Improved - Workflow handler performance.
-16. Improved - Workflow shortcuts no longer trigger copy and undo while debugging.
-17. Improved - Streaming output continues after switching browser tabs.
-18. Improved - External file dataset APIs.
-19. Fixed - Dataset selection permission issue.
-20. Fixed - Starting a chat with an empty chatId failed when the first round carried a user selection.
-21. Fixed - createDataset API did not assign `intro`.
-22. Fixed - Chat box rendering performance.
-23. Fixed - Tool call history was stored incorrectly.
+10. New - Version locking for nested child app calls. Unless the parent app actively updates the version, the child does not run its latest version, keeping the parent app stable.
+11. New - Plugin outputs can mark specific fields as the tool response.
+12. New - Workflows that nest child apps can enable `non-stream mode`; simple mode can also use a workflow as a plugin, and child apps invoked from simple mode always run in non-stream mode.
+13. New - In debug mode, child app calls return detailed run data.
+14. New - Logs of nested child app calls are kept in all modes.
+15. Improved - Workflow nesting is limited to 20 levels to avoid infinite loops caused by bad orchestration.
+16. Improved - Workflow handler performance.
+17. Improved - Workflow shortcuts no longer trigger copy and undo while debugging.
+18. Fixed - Global variables modified inside a workflow tool call were not passed on to later steps.
+19. Improved - Streaming output continues after switching browser tabs.
+20. Improved - External file dataset APIs.
+21. Fixed - Dataset selection permission issue.
+22. Fixed - Starting a chat with an empty chatId failed when the first round carried a user selection.
+23. Fixed - createDataset API did not assign `intro`.
+24. Fixed - Chat box rendering performance.
+25. Fixed - Tool call history was stored incorrectly.
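As a reference for item 7, a hedged sketch of what an o1 entry's `defaultConfig` override might look like, written as a TypeScript literal standing in for the JSON entry. Only `defaultConfig` and its three keys come from the note above; the surrounding field names and values are illustrative, not taken from the real `config.json` schema.

```ts
// Illustrative model entry; the actual config.json schema may differ.
const o1ModelEntry = {
  model: 'o1-preview',
  name: 'o1-preview',
  defaultConfig: {
    temperature: 1, // o1 only accepts the default temperature
    max_tokens: 4000, // example cap; adjust for your deployment
    stream: false // o1 does not support stream mode
  }
};
```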
@@ -79,6 +79,7 @@ export type RuntimeNodeItemType = {
   outputs: FlowNodeOutputItemType[];

   pluginId?: string; // workflow id / plugin id
+  version: string;
 };

 export type PluginRuntimeType = {
@@ -124,7 +124,8 @@ export const storeNodes2RuntimeNodes = (
         isEntry: entryNodeIds.includes(node.nodeId),
         inputs: node.inputs,
         outputs: node.outputs,
-        pluginId: node.pluginId
+        pluginId: node.pluginId,
+        version: node.version
       };
     }) || []
   );
@@ -51,6 +51,7 @@ export function reRankRecall({
       }));
     })
     .catch((err) => {
       console.log(err);
+      addLog.error('rerank error', err);

       return [];
@@ -2,7 +2,6 @@ import { AppSchema } from '@fastgpt/global/core/app/type';
 import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
 import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
 import { getLLMModel } from '../ai/model';
-import { MongoAppVersion } from './version/schema';
 import { MongoApp } from './schema';

 export const beforeUpdateAppFormat = <T extends AppSchema['modules'] | undefined>({
@@ -46,30 +45,6 @@ export const beforeUpdateAppFormat = <T extends AppSchema['modules'] | undefined
   };
 };
-
-export const getAppLatestVersion = async (appId: string, app?: AppSchema) => {
-  const version = await MongoAppVersion.findOne({
-    appId,
-    isPublish: true
-  })
-    .sort({
-      time: -1
-    })
-    .lean();
-
-  if (version) {
-    return {
-      nodes: version.nodes,
-      edges: version.edges,
-      chatConfig: version.chatConfig || app?.chatConfig || {}
-    };
-  }
-  return {
-    nodes: app?.modules || [],
-    edges: app?.edges || [],
-    chatConfig: app?.chatConfig || {}
-  };
-};

 /* Get apps */
 export async function findAppAndAllChildren({
   teamId,
@@ -10,6 +10,7 @@ import { cloneDeep } from 'lodash';
 import { MongoApp } from '../schema';
 import { SystemPluginTemplateItemType } from '@fastgpt/global/core/workflow/type';
 import { getSystemPluginTemplates } from '../../../../plugins/register';
+import { getAppLatestVersion, getAppVersionById } from '../version/controller';

 /*
   plugin id rule:
@@ -34,38 +35,14 @@ export async function splitCombinePluginId(id: string) {
   return { source, pluginId: id };
 }

-const getChildAppTemplateById = async (
-  id: string
-): Promise<SystemPluginTemplateItemType & { teamId?: string }> => {
-  const { source, pluginId } = await splitCombinePluginId(id);
+type ChildAppType = SystemPluginTemplateItemType & { teamId?: string };
+const getSystemPluginTemplateById = async (
+  pluginId: string
+): Promise<SystemPluginTemplateItemType> => {
+  const item = getSystemPluginTemplates().find((plugin) => plugin.id === pluginId);
+  if (!item) return Promise.reject('plugin not found');

-  if (source === PluginSourceEnum.personal) {
-    const item = await MongoApp.findById(id).lean();
-    if (!item) return Promise.reject('plugin not found');
-
-    return {
-      id: String(item._id),
-      teamId: String(item.teamId),
-      name: item.name,
-      avatar: item.avatar,
-      intro: item.intro,
-      showStatus: true,
-      workflow: {
-        nodes: item.modules,
-        edges: item.edges,
-        chatConfig: item.chatConfig
-      },
-      templateType: FlowNodeTemplateTypeEnum.teamApp,
-      version: item?.pluginData?.nodeVersion || defaultNodeVersion,
-      originCost: 0,
-      currentCost: 0
-    };
-  } else {
-    const item = getSystemPluginTemplates().find((plugin) => plugin.id === pluginId);
-    if (!item) return Promise.reject('plugin not found');
-
-    return cloneDeep(item);
-  }
+  return cloneDeep(item);
 };

 /* format plugin modules to plugin preview module */
@@ -74,7 +51,39 @@ export async function getChildAppPreviewNode({
 }: {
   id: string;
 }): Promise<FlowNodeTemplateType> {
-  const app = await getChildAppTemplateById(id);
+  const app: ChildAppType = await (async () => {
+    const { source, pluginId } = await splitCombinePluginId(id);
+
+    if (source === PluginSourceEnum.personal) {
+      const item = await MongoApp.findById(id).lean();
+      if (!item) return Promise.reject('plugin not found');
+
+      const version = await getAppLatestVersion(id, item);
+
+      if (!version.versionId) return Promise.reject('App version not found');
+
+      return {
+        id: String(item._id),
+        teamId: String(item.teamId),
+        name: item.name,
+        avatar: item.avatar,
+        intro: item.intro,
+        showStatus: true,
+        workflow: {
+          nodes: version.nodes,
+          edges: version.edges,
+          chatConfig: version.chatConfig
+        },
+        templateType: FlowNodeTemplateTypeEnum.teamApp,
+        version: version.versionId,
+        originCost: 0,
+        currentCost: 0
+      };
+    } else {
+      return getSystemPluginTemplateById(pluginId);
+    }
+  })();
+
   const isPlugin = !!app.workflow.nodes.find(
     (node) => node.flowNodeType === FlowNodeTypeEnum.pluginInput
   );
@@ -99,9 +108,51 @@ export async function getChildAppPreviewNode({
   };
 }

-/* run plugin time */
-export async function getChildAppRuntimeById(id: string): Promise<PluginRuntimeType> {
-  const app = await getChildAppTemplateById(id);
+/*
+  Get runtime plugin data
+  System plugin: plugin id
+  Personal plugin: Version id
+*/
+export async function getChildAppRuntimeById(
+  id: string,
+  versionId?: string
+): Promise<PluginRuntimeType> {
+  const app: ChildAppType = await (async () => {
+    const { source, pluginId } = await splitCombinePluginId(id);
+
+    if (source === PluginSourceEnum.personal) {
+      const item = await MongoApp.findById(id).lean();
+      if (!item) return Promise.reject('plugin not found');
+
+      const version = await getAppVersionById({
+        appId: id,
+        versionId,
+        app: item
+      });
+
+      return {
+        id: String(item._id),
+        teamId: String(item.teamId),
+        name: item.name,
+        avatar: item.avatar,
+        intro: item.intro,
+        showStatus: true,
+        workflow: {
+          nodes: version.nodes,
+          edges: version.edges,
+          chatConfig: version.chatConfig
+        },
+        templateType: FlowNodeTemplateTypeEnum.teamApp,
+
+        // Not used here
+        version: item?.pluginData?.nodeVersion || defaultNodeVersion,
+        originCost: 0,
+        currentCost: 0
+      };
+    } else {
+      return getSystemPluginTemplateById(pluginId);
+    }
+  })();

   return {
     id: app.id,
packages/service/core/app/version/controller.ts (new file, 59 lines)
@@ -0,0 +1,59 @@
import { AppSchema } from '@fastgpt/global/core/app/type';
import { MongoAppVersion } from './schema';
import { Types } from '../../../common/mongo';

export const getAppLatestVersion = async (appId: string, app?: AppSchema) => {
  const version = await MongoAppVersion.findOne({
    appId,
    isPublish: true
  })
    .sort({
      time: -1
    })
    .lean();

  if (version) {
    return {
      versionId: version._id,
      nodes: version.nodes,
      edges: version.edges,
      chatConfig: version.chatConfig || app?.chatConfig || {}
    };
  }
  return {
    versionId: app?.pluginData?.nodeVersion,
    nodes: app?.modules || [],
    edges: app?.edges || [],
    chatConfig: app?.chatConfig || {}
  };
};

export const getAppVersionById = async ({
  appId,
  versionId,
  app
}: {
  appId: string;
  versionId?: string;
  app?: AppSchema;
}) => {
  // Check whether versionId is a valid ObjectId
  if (versionId && Types.ObjectId.isValid(versionId)) {
    const version = await MongoAppVersion.findOne({
      _id: versionId,
      appId
    }).lean();

    if (version) {
      return {
        versionId: version._id,
        nodes: version.nodes,
        edges: version.edges,
        chatConfig: version.chatConfig || app?.chatConfig || {}
      };
    }
  }

  // If the version does not exist, the latest version is returned
  return getAppLatestVersion(appId, app);
};
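A hedged usage sketch for the helpers in this new file. The wrapper function is illustrative and not part of the commit; the import paths mirror the ones used elsewhere in the diff. Resolution order: a pinned versionId wins when it is a valid ObjectId, otherwise the latest published version, and finally the app's draft (`modules`/`edges`).

```ts
import type { AppSchema } from '@fastgpt/global/core/app/type';
import { getAppVersionById } from '@fastgpt/service/core/app/version/controller';

// Resolve the workflow a child node should run, given an optional pinned version.
async function resolveChildWorkflow(appId: string, app?: AppSchema, pinnedVersionId?: string) {
  const { versionId, nodes, edges, chatConfig } = await getAppVersionById({
    appId,
    versionId: pinnedVersionId, // e.g. node.version stored on the parent workflow node
    app
  });
  return { versionId, nodes, edges, chatConfig };
}
```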
@@ -183,8 +183,18 @@ export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<
   });

   // flat child tool response
-  const childToolResponse = dispatchFlowResponse.map((item) => item.flowResponses).flat();
-  const newVariables = dispatchFlowResponse[dispatchFlowResponse.length - 1]?.newVariables;
+  let newVariables: Record<string, any> = props.variables;
+  const childToolResponse = dispatchFlowResponse
+    .map((item) => {
+      // Computed new variables
+      newVariables = {
+        ...newVariables,
+        ...item.newVariables
+      };
+
+      return item.flowResponses;
+    })
+    .flat();

   // concat tool usage
   const totalPointsUsage =
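The rewritten block above accumulates `newVariables` across every child tool run instead of keeping only the last run's variables. A minimal standalone sketch of the same merge semantics, with an illustrative `ChildRunResult` type:

```ts
type ChildRunResult = { flowResponses: unknown[]; newVariables?: Record<string, any> };

// Start from the variables the parent already holds; later child runs override earlier keys.
function mergeChildVariables(
  initial: Record<string, any>,
  runs: ChildRunResult[]
): Record<string, any> {
  return runs.reduce((acc, run) => ({ ...acc, ...run.newVariables }), { ...initial });
}

// Example: the second run's value for `lang` wins.
const merged = mergeChildVariables({ lang: 'en' }, [
  { flowResponses: [], newVariables: { token: 'abc' } },
  { flowResponses: [], newVariables: { lang: 'zh' } }
]);
// merged => { lang: 'zh', token: 'abc' }
```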
@@ -38,7 +38,7 @@ export const dispatchLoop = async (props: Props): Promise<Response> => {
   const loopDetail: ChatHistoryItemResType[] = [];
   let assistantResponses: AIChatItemValueItemType[] = [];
   let totalPoints = 0;
-  let newVariables: Record<string, any> = {};
+  let newVariables: Record<string, any> = props.variables;

   for await (const item of loopInputArray) {
     const response = await dispatchWorkFlow({
@@ -26,7 +26,7 @@ type RunPluginResponse = DispatchNodeResultType<{}>;

 export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPluginResponse> => {
   const {
-    node: { pluginId },
+    node: { pluginId, version },
     runningAppInfo,
     query,
     params: { system_forbid_stream = false, ...data } // Plugin input
@@ -45,7 +45,7 @@ export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPlugi
     per: ReadPermissionVal
   });

-  const plugin = await getChildAppRuntimeById(pluginId);
+  const plugin = await getChildAppRuntimeById(pluginId, version);

   const outputFilterMap =
     plugin.nodes
@@ -16,7 +16,7 @@ import { chatValue2RuntimePrompt, runtimePrompt2ChatsValue } from '@fastgpt/glob
 import { DispatchNodeResultType } from '@fastgpt/global/core/workflow/runtime/type';
 import { authAppByTmbId } from '../../../../support/permission/app/auth';
 import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
-import { getAppLatestVersion } from '../../../app/controller';
+import { getAppVersionById } from '../../../app/version/controller';

 type Props = ModuleDispatchProps<{
   [NodeInputKeyEnum.userChatInput]: string;
@@ -34,8 +34,7 @@ export const dispatchRunAppNode = async (props: Props): Promise<Response> => {
     runningAppInfo,
     histories,
     query,
-    mode,
-    node: { pluginId },
+    node: { pluginId: appId, version },
     workflowStreamResponse,
     params,
     variables
@@ -45,19 +44,23 @@ export const dispatchRunAppNode = async (props: Props): Promise<Response> => {
   if (!userChatInput) {
     return Promise.reject('Input is empty');
   }
-  if (!pluginId) {
+  if (!appId) {
     return Promise.reject('pluginId is empty');
   }

   // Auth the app by tmbId(Not the user, but the workflow user)
   const { app: appData } = await authAppByTmbId({
-    appId: pluginId,
+    appId: appId,
     tmbId: runningAppInfo.tmbId,
     per: ReadPermissionVal
   });
-  const { nodes, edges, chatConfig } = await getAppLatestVersion(pluginId);
-  const childStreamResponse = system_forbid_stream ? false : props.stream;
+  const { nodes, edges, chatConfig } = await getAppVersionById({
+    appId,
+    versionId: version,
+    app: appData
+  });

+  const childStreamResponse = system_forbid_stream ? false : props.stream;
   // Auto line
   if (childStreamResponse) {
     workflowStreamResponse?.({
@@ -77,8 +77,18 @@ try {
   // timer task. Get standard plan;Get free plan;Clear expired extract plan
   SubSchema.index({ type: 1, expiredTime: -1, currentSubLevel: 1 });

-  // unique
-  SubSchema.index({ teamId: 1, type: 1, currentSubLevel: 1 }, { unique: true });
+  // Updated unique index
+  SubSchema.index(
+    {
+      teamId: 1,
+      type: 1,
+      currentSubLevel: 1
+    },
+    {
+      unique: true,
+      partialFilterExpression: { type: SubTypeEnum.standard }
+    }
+  );
 } catch (error) {
   console.log(error);
 }
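The rebuilt index keeps the same `(teamId, type, currentSubLevel)` key but enforces uniqueness only for documents whose `type` is `standard`, so other subscription types can coexist for a team. A rough equivalent with the raw MongoDB driver is sketched below; the collection name is left as a parameter because it is not shown in the diff:

```ts
import { MongoClient } from 'mongodb';

async function ensureStandardSubIndex(uri: string, collectionName: string) {
  const client = await MongoClient.connect(uri);
  try {
    await client.db().collection(collectionName).createIndex(
      { teamId: 1, type: 1, currentSubLevel: 1 },
      {
        unique: true,
        // Only 'standard' subscriptions are forced to be unique per team.
        partialFilterExpression: { type: 'standard' }
      }
    );
  } finally {
    await client.close();
  }
}
```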
@@ -67,7 +67,6 @@
   "logs_message_total": "Total Messages",
   "logs_title": "Title",
   "mark_count": "Number of Marked Answers",
-  "module.Confirm Sync": "Will update to the latest template configuration. Fields not in the template will be deleted (including all custom fields). It is recommended to copy a node first, then update the original node version.",
   "module.Custom Title Tip": "This title will be displayed during the conversation.",
   "module.No Modules": "No Plugins Found",
   "module.type": "\"{{type}}\" type\n{{description}}",
@@ -1,6 +1,7 @@
 {
   "Array_element": "Array element",
   "Code": "Code",
+  "Confirm_sync_node": "It will be updated to the latest node configuration and fields that do not exist in the template will be deleted (including all custom fields).\n\nIf the fields are complex, it is recommended that you copy a node first and then update the original node to facilitate parameter copying.",
   "Quote_prompt_setting": "Quote prompt",
   "add_new_input": "Add New Input",
   "add_new_output": "New output",
@@ -67,7 +67,6 @@
   "logs_message_total": "消息总数",
   "logs_title": "标题",
   "mark_count": "标注答案数量",
-  "module.Confirm Sync": "将会更新至最新的模板配置,不存在模板中的字段将会被删除(包括所有自定义字段),建议您先复制一份节点,再更新原来节点的版本。",
   "module.Custom Title Tip": "该标题名字会展示在对话过程中",
   "module.No Modules": "没找到插件",
   "module.type": "\"{{type}}\"类型\n{{description}}",
@@ -1,6 +1,7 @@
 {
   "Array_element": "数组元素",
   "Code": "代码",
+  "Confirm_sync_node": "将会更新至最新的节点配置,不存在模板中的字段将会被删除(包括所有自定义字段)。\n如果字段较为复杂,建议您先复制一份节点,再更新原来的节点,便于参数复制。",
   "Quote_prompt_setting": "引用提示词配置",
   "add_new_input": "新增输入",
   "add_new_output": "新增输出",
@@ -24,9 +24,6 @@ const I18nLngSelector = () => {

   return (
     <MySelect
-      _hover={{
-        bg: 'myGray.200'
-      }}
       value={i18n.language}
       list={list}
       onchange={(val: any) => {
@@ -2,7 +2,7 @@ import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/nex
 import { NextAPI } from '@/service/middleware/entry';
 import { authApp } from '@fastgpt/service/support/permission/app/auth';
 import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
-import { getAppLatestVersion } from '@fastgpt/service/core/app/controller';
+import { getAppLatestVersion } from '@fastgpt/service/core/app/version/controller';
 import { AppChatConfigType } from '@fastgpt/global/core/app/type';
 import { StoreEdgeItemType } from '@fastgpt/global/core/workflow/type/edge';
 import { StoreNodeItemType } from '@fastgpt/global/core/workflow/type/node';
@@ -21,7 +21,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>): Promise<
     versionName
   } = req.body as PostPublishAppProps;

-  const { app, tmbId } = await authApp({ appId, req, per: WritePermissionVal, authToken: true });
+  const { tmbId } = await authApp({ appId, req, per: WritePermissionVal, authToken: true });

   const { nodes: formatNodes } = beforeUpdateAppFormat({ nodes });
@@ -6,7 +6,7 @@ import { getChatModelNameListByModules } from '@/service/core/app/workflow';
 import type { InitChatProps, InitChatResponse } from '@/global/core/chat/api.d';
 import { MongoChat } from '@fastgpt/service/core/chat/chatSchema';
 import { ChatErrEnum } from '@fastgpt/global/common/error/code/chat';
-import { getAppLatestVersion } from '@fastgpt/service/core/app/controller';
+import { getAppLatestVersion } from '@fastgpt/service/core/app/version/controller';
 import { NextAPI } from '@/service/middleware/entry';
 import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
 import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
@@ -15,7 +15,7 @@ async function handler(
   req: NextApiRequest,
   res: NextApiResponse
 ): Promise<InitChatResponse | void> {
-  let { appId, chatId, loadCustomFeedbacks } = req.query as InitChatProps;
+  let { appId, chatId } = req.query as InitChatProps;

   if (!appId) {
     return jsonRes(res, {
@@ -9,7 +9,7 @@ import { MongoApp } from '@fastgpt/service/core/app/schema';
 import { AppErrEnum } from '@fastgpt/global/common/error/code/app';
 import { MongoChat } from '@fastgpt/service/core/chat/chatSchema';
 import { ChatErrEnum } from '@fastgpt/global/common/error/code/chat';
-import { getAppLatestVersion } from '@fastgpt/service/core/app/controller';
+import { getAppLatestVersion } from '@fastgpt/service/core/app/version/controller';
 import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
 import { NextAPI } from '@/service/middleware/entry';
@@ -9,7 +9,7 @@ import { AppErrEnum } from '@fastgpt/global/common/error/code/app';
 import { authTeamSpaceToken } from '@/service/support/permission/auth/team';
 import { MongoTeam } from '@fastgpt/service/support/user/team/teamSchema';
 import { ChatErrEnum } from '@fastgpt/global/common/error/code/chat';
-import { getAppLatestVersion } from '@fastgpt/service/core/app/controller';
+import { getAppLatestVersion } from '@fastgpt/service/core/app/version/controller';
 import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
 import { NextAPI } from '@/service/middleware/entry';
@@ -46,7 +46,7 @@ import { AIChatItemType, UserChatItemType } from '@fastgpt/global/core/chat/type
 import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';

 import { NextAPI } from '@/service/middleware/entry';
-import { getAppLatestVersion } from '@fastgpt/service/core/app/controller';
+import { getAppLatestVersion } from '@fastgpt/service/core/app/version/controller';
 import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
 import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
 import {
@@ -72,7 +72,8 @@ const SaveButton = ({
       toast({
         status: 'success',
         title: t('app:saved_success'),
-        position: 'top-right'
+        position: 'top-right',
+        isClosable: true
       });
       onClose();
       setIsSave(false);
@@ -92,10 +92,6 @@ const NodeCard = (props: Props) => {
     return { node, parentNode };
   }, [nodeList, nodeId]);

-  const { openConfirm: onOpenConfirmSync, ConfirmModal: ConfirmSyncModal } = useConfirm({
-    content: t('app:module.Confirm Sync')
-  });
-
   const { data: nodeTemplate, runAsync: getNodeLatestTemplate } = useRequest2(
     async () => {
       if (
@@ -125,6 +121,10 @@ const NodeCard = (props: Props) => {
       manual: false
     }
   );

+  const { openConfirm: onOpenConfirmSync, ConfirmModal: ConfirmSyncModal } = useConfirm({
+    content: t('workflow:Confirm_sync_node')
+  });
   const hasNewVersion = nodeTemplate && nodeTemplate.version !== node?.version;

   const { runAsync: onClickSyncVersion } = useRequest2(
@@ -264,7 +264,6 @@ const NodeCard = (props: Props) => {
           <MenuRender nodeId={nodeId} menuForbid={menuForbid} nodeList={nodeList} />
           <NodeIntro nodeId={nodeId} intro={intro} />
         </Box>
-        <ConfirmSyncModal />
       </Box>
     );
   }, [
@@ -282,11 +281,11 @@ const NodeCard = (props: Props) => {
     menuForbid,
     nodeList,
     intro,
-    ConfirmSyncModal,
     onChangeNode,
     onOpenCustomTitleModal,
     toast
   ]);

   const RenderHandle = useMemo(() => {
     return (
       <>
@@ -342,6 +341,7 @@ const NodeCard = (props: Props) => {
       {RenderHandle}
       {RenderToolHandle}

+      <ConfirmSyncModal />
       <EditTitleModal maxLength={20} />
     </Flex>
   );
@@ -384,7 +384,12 @@ const MenuRender = React.memo(function MenuRender({
          pluginId: node.data.pluginId,
+          version: node.data.version
        };

-        return state.concat(
+        return [
+          ...state.map((item) => ({
+            ...item,
+            selected: false
+          })),
          storeNode2FlowNode({
            item: {
              flowNodeType: template.flowNodeType,
@@ -403,7 +408,7 @@ const MenuRender = React.memo(function MenuRender({
            parentNodeId: undefined,
            t
          })
-        );
+        ];
      });
    },
    [computedNewNodeName, setNodes, t]
@@ -55,17 +55,6 @@ const Login = ({ ChineseRedirectUrl }: { ChineseRedirectUrl: string }) => {
     defaultValue: true
   });

-  const checkIpInChina = useCallback(
-    () =>
-      GET(ipDetectURL).then((res: any) => {
-        const country = res?.country;
-        if (country && country === '中国' && res.city !== '中国香港') {
-          onOpenRedirect();
-        }
-      }),
-    [onOpenRedirect]
-  );
-
   const loginSuccess = useCallback(
     (res: ResLogin) => {
       // init store
@@ -101,10 +90,27 @@ const Login = ({ ChineseRedirectUrl }: { ChineseRedirectUrl: string }) => {
     );
   }, [feConfigs.oauth]);

+  const checkIpInChina = useCallback(async () => {
+    try {
+      const res = await GET<any>(ipDetectURL);
+      const country = res?.country;
+      if (
+        country &&
+        country === '中国' &&
+        res.prov !== '中国香港' &&
+        res.prov !== '中国澳门' &&
+        res.prov !== '中国台湾'
+      ) {
+        onOpenRedirect();
+      }
+    } catch (error) {
+      console.log(error);
+    }
+  }, [onOpenRedirect]);
   useMount(() => {
     clearToken();
-    ChineseRedirectUrl && showRedirect && checkIpInChina();
     router.prefetch('/app/list');
+    ChineseRedirectUrl && showRedirect && checkIpInChina();
   });

   return (
@@ -2,10 +2,12 @@ import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
 import type { StoreNodeItemType } from '@fastgpt/global/core/workflow/type/node.d';

 export const getChatModelNameListByModules = (nodes: StoreNodeItemType[]): string[] => {
-  return nodes
+  const modelList = nodes
     .map((item) => {
       const model = item.inputs.find((input) => input.key === NodeInputKeyEnum.aiModel)?.value;
       return global.llmModels.find((item) => item.model === model)?.name || '';
     })
     .filter(Boolean);
+
+  return Array.from(new Set(modelList));
 };
projects/app/src/types/index.d.ts (vendored, 2 lines changed)
@@ -22,7 +22,7 @@ export type RequestPaging = { pageNum: number; pageSize: number; [key]: any };
 declare global {
   var qaQueueLen: number;
   var vectorQueueLen: number;
   var geoip2: any;

   interface Window {
     grecaptcha: any;
     QRCode: any;
@@ -7,6 +7,7 @@ import {
 import { StoreNodeItemType } from '@fastgpt/global/core/workflow/type/node.d';
 import {
   chatHistoryValueDesc,
+  defaultNodeVersion,
   FlowNodeInputTypeEnum,
   FlowNodeTypeEnum
 } from '@fastgpt/global/core/workflow/node/constant';
@@ -195,7 +196,7 @@ export function form2AppWorkflow(
         x: 918.5901682164496,
         y: -227.11542247619582
       },
-      version: '481',
+      version: DatasetSearchModule.version,
       inputs: [
         {
           key: 'datasets',
@@ -377,7 +378,8 @@ export function form2AppWorkflow(
         x: 500 + 500 * (i + 1),
         y: 545
       },
-      version: tool.version,
+      // No need to pin a version here; give a non-existent version so the latest is always used
+      version: defaultNodeVersion,
       inputs: tool.inputs.map((input) => {
         // Special key value
         if (input.key === NodeInputKeyEnum.forbidStream) {
python/ocr/surya/README.md (new file, 110 lines; file diff suppressed because one or more lines are too long)
@@ -19,7 +19,8 @@ from surya.model.recognition.model import load_model as load_rec_model
 from surya.model.recognition.processor import load_processor as load_rec_processor
 from surya.ocr import run_ocr
 from surya.schema import OCRResult

+import warnings
+warnings.filterwarnings("ignore", category=FutureWarning, module="transformers")
 app = FastAPI()
 security = HTTPBearer()
 env_bearer_token = None
@@ -100,40 +101,53 @@ class Chat(object):
         return string_result

     def query_ocr(self, image_base64: str,
-                  sorted: bool) -> List[OCRResult] | str:
+                  sorted: bool) -> str:
         if image_base64 is None or len(image_base64) == 0:
-            return []
-        image = Chat.base64_to_image(image_base64)
-
-        ocr_result = self.surya.run(image)
-        result = []
-
-        for text_line in ocr_result[0].text_lines:
-            result.append({"text": text_line.text, "bbox": text_line.bbox})
-        if sorted:
-            result = Chat.sort_text_by_bbox(result)
-
-        torch_gc()
-        return result
+            return ""
+        try:
+            image = Chat.base64_to_image(image_base64)
+            ocr_result = self.surya.run(image)
+            result = []
+
+            for text_line in ocr_result[0].text_lines:
+                result.append(text_line.text)
+
+            if sorted:
+                result = self.sort_text_lines(result)
+
+            # Join all text lines into a single string, separated by newlines
+            final_result = "\n".join(result)
+
+            torch_gc()
+            return final_result
+        except Exception as e:
+            logging.error(f"OCR 处理失败: {e}")
+            raise HTTPException(status_code=400, detail=f"OCR 处理失败: {str(e)}")
+
+    @staticmethod
+    def sort_text_lines(text_lines: List[str]) -> List[str]:
+        # Custom sorting logic could go here
+        # For now simply return the original list, since there is no position info to sort by
+        return text_lines

-@app.post('/v1/surya_ocr')
+@app.post('/v1/ocr/text')
 async def handle_post_request(
         image_req: ImageReq,
         credentials: HTTPAuthorizationCredentials = Security(security)):
     token = credentials.credentials
     if env_bearer_token is not None and token != env_bearer_token:
-        raise HTTPException(status_code=401, detail="Invalid token")
+        raise HTTPException(status_code=401, detail="无效的令牌")
     chat = Chat()
     try:
         results = []
         for image_base64 in image_req.images:
             results.append(chat.query_ocr(image_base64, image_req.sorted))
-        return {"error": "success", "results": results}
+        return {"error": None, "results": results}
+    except HTTPException as he:
+        raise he
     except Exception as e:
         logging.error(f"识别报错:{e}")
-        return {"error": "识别出错"}
+        raise HTTPException(status_code=500, detail=f"识别出错: {str(e)}")

 if __name__ == "__main__":
     env_bearer_token = os.getenv("ACCESS_TOKEN")
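For reference, a hedged TypeScript client for the renamed `/v1/ocr/text` endpoint above. The request body (`images`, `sorted`) and the response shape (`{ error, results }`) are read from the handler in the diff; the host, the port (7230 in the old README), and the token are placeholders:

```ts
import { readFile } from 'node:fs/promises';

// Send one image to the OCR service and return the recognized text blocks.
async function ocrImage(imagePath: string, accessToken: string): Promise<string[]> {
  const image = (await readFile(imagePath)).toString('base64');

  const res = await fetch('http://localhost:7230/v1/ocr/text', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Bearer ${accessToken}`
    },
    body: JSON.stringify({ images: [image], sorted: true })
  });

  if (!res.ok) throw new Error(`OCR request failed: ${res.status}`);
  const data = (await res.json()) as { error: string | null; results: string[] };
  if (data.error) throw new Error(data.error);
  return data.results; // one newline-joined text block per input image
}
```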
@@ -1,120 +0,0 @@
# Integrating Surya OCR text recognition

## Deploy from source

### 1. Environment

- Python 3.9+
- CUDA 11.8
- Unrestricted internet access (to reach Hugging Face)

### 2. Install dependencies

```bash
pip install -r requirements.txt
```

### 3. Download the models

On first run the code downloads the models from Hugging Face automatically, so the steps below can be skipped.
You can also download the models manually by cloning them into the code directory:

```sh
mkdir vikp && cd vikp

git lfs install

git clone https://huggingface.co/vikp/surya_det3
# mirror download: https://hf-mirror.com/vikp/surya_det3

git clone https://huggingface.co/vikp/surya_rec2
# mirror download: https://hf-mirror.com/vikp/surya_rec2
```

The resulting directory layout should look like this:

```
vikp/surya_det3
vikp/surya_rec2
app.py
Dockerfile
requirements.txt
```

### 4. Run the service

```bash
python app.py
```

The request URL is
`http://0.0.0.0:7230/v1/surya_ocr`

### 5. Test

```python
import requests
import base64

IMAGE_PATH = "your/path/to/image.png"
ACCESS_TOKEN = "your_access_token"

with open(IMAGE_PATH, 'rb') as img_file:
    encoded_string = base64.b64encode(img_file.read())
encoded_image = encoded_string.decode('utf-8')
data = {"images": [encoded_image], "sorted": True}
headers = {
    "Content-Type": "application/json",
    "Authorization": f"Bearer {ACCESS_TOKEN}"
}
res = requests.post(url="http://0.0.0.0:7230/v1/surya_ocr",
                    headers=headers,
                    json=data)

print(res.text)
```

## Docker deployment

### Getting the image

**Build the image locally:**
```bash
docker build -t surya_ocr:v0.1 .
```

**Or pull a published image:**
Todo: not yet published

### docker-compose.yml example
```yaml
version: '3'
services:
  surya-ocr:
    image: surya_ocr:v0.1
    container_name: surya-ocr
    # GPU runtime; if the host has no GPU support, just remove the deploy section
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu]
    ports:
      - 7230:7230
    environment:
      - BATCH_SIZE=32
      - ACCESS_TOKEN=YOUR_ACCESS_TOKEN
      - LANGS='["zh","en"]'
```
**Environment variables:**
```
BATCH_SIZE: set according to available RAM/VRAM; each batch uses roughly 40MB of VRAM (defaults: cpu 32, mps 64, cuda 512)
ACCESS_TOKEN: access token for the service
LANGS: list of supported languages, default ["zh","en"]
```

## Integrating with FastGPT

Todo: to be added