perf: mcp save raw schema (#5030)

* perf: mcp save raw schema

* fix: test

* code

* perf: json schema test

* perf: mcp
Archer
2025-06-13 18:46:55 +08:00
committed by GitHub
parent 0914eacb5e
commit 9d6a48a62f
35 changed files with 424 additions and 415 deletions

View File

@@ -22,6 +22,9 @@ weight: 788
3. Moved text chunking into a worker thread to avoid blocking.
4. Show more plan usage information.
5. Improved input box styles; updated the voice input styles on desktop and mobile.
6. MCP tool calls now use the raw schema, guaranteeing schema integrity.
7. Deleting knowledge base files is no longer blocked when a file does not exist.
8. Upgraded the MCP SDK for compatibility with the latest Streamable HTTP transport.
## 🐛 Fixes
@@ -30,3 +33,4 @@ weight: 788
3. Potential XSS attack on the login page.
4. The file list being lost while using voice input in the input box.
5. The image TTL field in knowledge base documents was not cleared, causing images to expire.
6. Integer-typed data was not escaped when storing MCP tools.

View File

@@ -0,0 +1,86 @@
import { WorkflowIOValueTypeEnum } from '../workflow/constants';
import { FlowNodeInputTypeEnum } from '../workflow/node/constant';
import type { FlowNodeInputItemType } from '../workflow/type/io';
type SchemaInputValueType = 'string' | 'number' | 'integer' | 'boolean' | 'array' | 'object';
export type JsonSchemaPropertiesItemType = {
description?: string;
type: SchemaInputValueType;
enum?: string[];
minimum?: number;
maximum?: number;
items?: { type: SchemaInputValueType };
};
export type JSONSchemaInputType = {
type: SchemaInputValueType;
properties?: Record<string, JsonSchemaPropertiesItemType>;
required?: string[];
};
const getNodeInputTypeFromSchemaInputType = ({
type,
arrayItems
}: {
type: SchemaInputValueType;
arrayItems?: { type: SchemaInputValueType };
}) => {
if (type === 'string') return WorkflowIOValueTypeEnum.string;
if (type === 'number') return WorkflowIOValueTypeEnum.number;
if (type === 'integer') return WorkflowIOValueTypeEnum.number;
if (type === 'boolean') return WorkflowIOValueTypeEnum.boolean;
if (type === 'object') return WorkflowIOValueTypeEnum.object;
if (!arrayItems) return WorkflowIOValueTypeEnum.arrayAny;
const itemType = arrayItems.type;
if (itemType === 'string') return WorkflowIOValueTypeEnum.arrayString;
if (itemType === 'number') return WorkflowIOValueTypeEnum.arrayNumber;
if (itemType === 'integer') return WorkflowIOValueTypeEnum.arrayNumber;
if (itemType === 'boolean') return WorkflowIOValueTypeEnum.arrayBoolean;
if (itemType === 'object') return WorkflowIOValueTypeEnum.arrayObject;
return WorkflowIOValueTypeEnum.arrayAny;
};
const getNodeInputRenderTypeFromSchemaInputType = ({
type,
enum: enumList,
minimum,
maximum
}: JsonSchemaPropertiesItemType) => {
if (enumList && enumList.length > 0) {
return {
value: enumList[0],
renderTypeList: [FlowNodeInputTypeEnum.select],
list: enumList.map((item) => ({ label: item, value: item }))
};
}
if (type === 'string') {
return {
renderTypeList: [FlowNodeInputTypeEnum.input]
};
}
if (type === 'number') {
return {
renderTypeList: [FlowNodeInputTypeEnum.numberInput],
max: maximum,
min: minimum
};
}
if (type === 'boolean') {
return {
renderTypeList: [FlowNodeInputTypeEnum.switch]
};
}
return { renderTypeList: [FlowNodeInputTypeEnum.JSONEditor] };
};
export const jsonSchema2NodeInput = (jsonSchema: JSONSchemaInputType): FlowNodeInputItemType[] => {
return Object.entries(jsonSchema?.properties || {}).map(([key, value]) => ({
key,
label: key,
valueType: getNodeInputTypeFromSchemaInputType({ type: value.type, arrayItems: value.items }),
description: value.description,
toolDescription: value.description || key,
required: jsonSchema?.required?.includes(key),
...getNodeInputRenderTypeFromSchemaInputType(value)
}));
};
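A minimal usage sketch of the new helper (the schema below is invented for illustration): enum properties become select inputs with the first option preselected, numeric bounds map to min/max, and anything unrecognized falls back to the JSON editor.

import type { JSONSchemaInputType } from '@fastgpt/global/core/app/jsonschema';
import { jsonSchema2NodeInput } from '@fastgpt/global/core/app/jsonschema';

// Hypothetical MCP tool input schema
const schema: JSONSchemaInputType = {
  type: 'object',
  properties: {
    city: { type: 'string', description: 'City name' },
    unit: { type: 'string', enum: ['celsius', 'fahrenheit'] },
    days: { type: 'number', minimum: 1, maximum: 7 }
  },
  required: ['city']
};

const inputs = jsonSchema2NodeInput(schema);
// inputs[0] -> { key: 'city', renderTypeList: ['input'], required: true, ... }
// inputs[1] -> { key: 'unit', renderTypeList: ['select'], value: 'celsius', list: [...] }
// inputs[2] -> { key: 'days', renderTypeList: ['numberInput'], min: 1, max: 7, ... }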

View File

@@ -0,0 +1,12 @@
import type { StoreSecretValueType } from '../../../common/secret/type';
import type { McpToolConfigType } from '../type';
export type McpToolSetDataType = {
url: string;
headerSecret?: StoreSecretValueType;
toolList: McpToolConfigType[];
};
export type McpToolDataType = McpToolConfigType & {
url: string;
headerSecret?: StoreSecretValueType;
};
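For orientation, a sketch of the values these types describe (URL and tool are invented): the MCP tool-set node stores one McpToolSetDataType listing every tool, and each generated tool node stores a McpToolDataType for a single tool.

import type { McpToolSetDataType, McpToolDataType } from '@fastgpt/global/core/app/mcpTools/type';

// Hypothetical data, for illustration only
const toolSetData: McpToolSetDataType = {
  url: 'https://example.com/mcp',
  toolList: [
    {
      name: 'get_weather',
      description: 'Query the weather for a city',
      inputSchema: {
        type: 'object',
        properties: { city: { type: 'string' } },
        required: ['city']
      }
    }
  ]
};

// One node per tool: the tool config plus the connection info it needs at runtime
const toolData: McpToolDataType = {
  ...toolSetData.toolList[0],
  url: toolSetData.url
};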

View File

@@ -1,4 +1,8 @@
import { NodeOutputKeyEnum, WorkflowIOValueTypeEnum } from '../../workflow/constants';
import {
NodeInputKeyEnum,
NodeOutputKeyEnum,
WorkflowIOValueTypeEnum
} from '../../workflow/constants';
import {
FlowNodeInputTypeEnum,
FlowNodeOutputTypeEnum,
@@ -9,6 +13,7 @@ import { type McpToolConfigType } from '../type';
import { i18nT } from '../../../../web/i18n/utils';
import { type RuntimeNodeItemType } from '../../workflow/runtime/type';
import { type StoreSecretValueType } from '../../../common/secret/type';
import { jsonSchema2NodeInput } from '../jsonschema';
export const getMCPToolSetRuntimeNode = ({
url,
@@ -30,14 +35,14 @@ export const getMCPToolSetRuntimeNode = ({
intro: 'MCP Tools',
inputs: [
{
key: 'toolSetData',
key: NodeInputKeyEnum.toolSetData,
label: 'Tool Set Data',
valueType: WorkflowIOValueTypeEnum.object,
renderTypeList: [FlowNodeInputTypeEnum.hidden],
value: {
url,
toolList,
headerSecret
headerSecret,
toolList
}
}
],
@@ -65,7 +70,7 @@ export const getMCPToolRuntimeNode = ({
intro: tool.description,
inputs: [
{
key: 'toolData',
key: NodeInputKeyEnum.toolData,
label: 'Tool Data',
valueType: WorkflowIOValueTypeEnum.object,
renderTypeList: [FlowNodeInputTypeEnum.hidden],
@@ -75,23 +80,7 @@ export const getMCPToolRuntimeNode = ({
headerSecret
}
},
...Object.entries(tool.inputSchema?.properties || {}).map(([key, value]) => ({
key,
label: key,
valueType: value.type as WorkflowIOValueTypeEnum, // TODO: a proper type mapping is needed here
description: value.description,
toolDescription: value.description || key,
required: tool.inputSchema?.required?.includes(key) || false,
renderTypeList: [
value.type === 'string'
? FlowNodeInputTypeEnum.input
: value.type === 'number'
? FlowNodeInputTypeEnum.numberInput
: value.type === 'boolean'
? FlowNodeInputTypeEnum.switch
: FlowNodeInputTypeEnum.JSONEditor
]
}))
...jsonSchema2NodeInput(tool.inputSchema)
],
outputs: [
{

View File

@@ -15,6 +15,7 @@ import type { ParentIdType } from '../../common/parentFolder/type';
import { FlowNodeInputTypeEnum } from '../../core/workflow/node/constant';
import type { WorkflowTemplateBasicType } from '@fastgpt/global/core/workflow/type';
import type { SourceMemberType } from '../../support/user/type';
import type { JSONSchemaInputType } from './jsonschema';
export type AppSchema = {
_id: string;
@@ -110,11 +111,7 @@ export type AppSimpleEditFormType = {
export type McpToolConfigType = {
name: string;
description: string;
inputSchema: {
type: string;
properties?: Record<string, { type: string; description?: string }>;
required?: string[];
};
inputSchema: JSONSchemaInputType;
};
/* app chat config type */

View File

@@ -1,4 +1,5 @@
import { i18nT } from '../../../web/i18n/utils';
import type { JsonSchemaPropertiesItemType } from '../app/jsonschema';
export enum FlowNodeTemplateTypeEnum {
systemInput = 'systemInput',
@@ -40,7 +41,11 @@ export enum WorkflowIOValueTypeEnum {
selectApp = 'selectApp'
}
export const toolValueTypeList = [
export const toolValueTypeList: {
label: string;
value: WorkflowIOValueTypeEnum;
jsonSchema: JsonSchemaPropertiesItemType;
}[] = [
{
label: WorkflowIOValueTypeEnum.string,
value: WorkflowIOValueTypeEnum.string,
@@ -93,6 +98,14 @@ export const toolValueTypeList = [
}
}
];
export const valueTypeJsonSchemaMap: Record<string, JsonSchemaPropertiesItemType> =
toolValueTypeList.reduce(
(acc, item) => {
acc[item.value] = item.jsonSchema;
return acc;
},
{} as Record<string, JsonSchemaPropertiesItemType>
);
/* reg: modulename key */
export enum NodeInputKeyEnum {
@@ -231,7 +244,11 @@ export enum NodeInputKeyEnum {
// comment
commentText = 'commentText',
commentSize = 'commentSize'
commentSize = 'commentSize',
// Tool
toolData = 'system_toolData',
toolSetData = 'system_toolSetData'
}
export enum NodeOutputKeyEnum {
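valueTypeJsonSchemaMap gives the dispatch code below an O(1) lookup in place of the old linear find over toolValueTypeList; a minimal sketch of the lookup-with-fallback pattern it enables (the helper name is made up):

import {
  toolValueTypeList,
  valueTypeJsonSchemaMap,
  WorkflowIOValueTypeEnum
} from '@fastgpt/global/core/workflow/constants';

// Fall back to the first entry (the string type) when the value type is missing or unmapped
const getJsonSchemaForValueType = (valueType?: WorkflowIOValueTypeEnum) =>
  valueType
    ? valueTypeJsonSchemaMap[valueType] || toolValueTypeList[0].jsonSchema
    : toolValueTypeList[0].jsonSchema;

// Returns whichever jsonSchema toolValueTypeList registers for boolean,
// or the string fallback if that type is not registered
const schema = getJsonSchemaForValueType(WorkflowIOValueTypeEnum.boolean);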

View File

@@ -1,7 +1,10 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"baseUrl": "."
"baseUrl": ".",
"paths": {
"@fastgpt/global/*": ["./*"]
}
},
"include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", "**/*.d.ts"]
}

View File

@@ -14,7 +14,10 @@
"isolatedModules": true,
"jsx": "preserve",
"incremental": true,
"baseUrl": "."
"baseUrl": ".",
"paths": {
"@fastgpt/plugins/*": ["./*"]
}
},
"include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", "**/*.d.ts", "../**/*.d.ts"],
"exclude": ["node_modules"]

View File

@@ -167,7 +167,15 @@ export async function delFileByFileIdList({
const bucket = getGridBucket(bucketName);
for await (const fileId of fileIdList) {
await bucket.delete(new Types.ObjectId(fileId));
try {
await bucket.delete(new Types.ObjectId(String(fileId)));
} catch (error: any) {
if (typeof error?.message === 'string' && error.message.includes('File not found')) {
addLog.warn('File not found', { fileId });
return;
}
return Promise.reject(error);
}
}
});
}

View File

@@ -4,10 +4,10 @@ import { type StoreSecretValueType } from '@fastgpt/global/common/secret/type';
import { HeaderSecretTypeEnum } from '@fastgpt/global/common/secret/constants';
export const storeSecretValue = (
storeSecret: StoreSecretValueType
storeSecret: StoreSecretValueType = {}
): Record<string, SecretValueType> => {
return Object.fromEntries(
Object.entries(storeSecret || {}).map(([key, value]) => [
Object.entries(storeSecret).map(([key, value]) => [
key,
{
secret: encryptSecret(value.value),

View File

@@ -10,7 +10,11 @@ import { ChatItemValueTypeEnum, ChatRoleEnum } from '@fastgpt/global/core/chat/c
import { createChatCompletion } from '../../../ai/config';
import type { ContextExtractAgentItemType } from '@fastgpt/global/core/workflow/template/system/contextExtract/type';
import type { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import { NodeOutputKeyEnum, toolValueTypeList } from '@fastgpt/global/core/workflow/constants';
import {
NodeOutputKeyEnum,
toolValueTypeList,
valueTypeJsonSchemaMap
} from '@fastgpt/global/core/workflow/constants';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
import type { ModuleDispatchProps } from '@fastgpt/global/core/workflow/runtime/type';
import { sliceJsonStr } from '@fastgpt/global/common/string/tools';
@@ -164,9 +168,10 @@ const getJsonSchema = ({ params: { extractKeys } }: ActionProps) => {
}
> = {};
extractKeys.forEach((item) => {
const jsonSchema = (
toolValueTypeList.find((type) => type.value === item.valueType) || toolValueTypeList[0]
)?.jsonSchema;
const jsonSchema = item.valueType
? valueTypeJsonSchemaMap[item.valueType] || toolValueTypeList[0].jsonSchema
: toolValueTypeList[0].jsonSchema;
properties[item.key] = {
...jsonSchema,
description: item.desc,

View File

@@ -37,7 +37,7 @@ import {
removeDatasetCiteText,
parseLLMStreamResponse
} from '../../../../ai/utils';
import { toolValueTypeList } from '@fastgpt/global/core/workflow/constants';
import { toolValueTypeList, valueTypeJsonSchemaMap } from '@fastgpt/global/core/workflow/constants';
import { type WorkflowInteractiveResponseType } from '@fastgpt/global/core/workflow/template/system/interactive/type';
import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants';
@@ -162,6 +162,14 @@ export const runToolWithFunctionCall = async (
const assistantResponses = response?.assistantResponses || [];
const functions: ChatCompletionCreateParams.Function[] = toolNodes.map((item) => {
if (item.jsonSchema) {
return {
name: item.nodeId,
description: item.intro,
parameters: item.jsonSchema
};
}
const properties: Record<
string,
{
@@ -172,9 +180,9 @@ export const runToolWithFunctionCall = async (
}
> = {};
item.toolParams.forEach((item) => {
const jsonSchema = (
toolValueTypeList.find((type) => type.value === item.valueType) || toolValueTypeList[0]
).jsonSchema;
const jsonSchema = item.valueType
? valueTypeJsonSchemaMap[item.valueType] || toolValueTypeList[0].jsonSchema
: toolValueTypeList[0].jsonSchema;
properties[item.key] = {
...jsonSchema,
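The practical effect of the new branch: for an MCP tool the stored inputSchema becomes the function's parameters object verbatim, so constraints such as enum, minimum and maximum reach the model instead of being lost in a per-field rebuild. A hypothetical example of the resulting function definition (names and schema invented):

// What toolNodes.map produces for a node that carries item.jsonSchema
const mcpFunction = {
  name: 'pluginNode_getWeather', // item.nodeId
  description: 'Query the weather for a city', // item.intro
  // item.jsonSchema, passed through unchanged
  parameters: {
    type: 'object',
    properties: {
      city: { type: 'string', description: 'City name' },
      unit: { type: 'string', enum: ['celsius', 'fahrenheit'] }
    },
    required: ['city']
  }
};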

View File

@@ -1,4 +1,4 @@
import { NodeOutputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import { NodeInputKeyEnum, NodeOutputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
import type {
ChatDispatchProps,
@@ -22,7 +22,7 @@ import { formatModelChars2Points } from '../../../../../support/wallet/usage/uti
import { getHistoryPreview } from '@fastgpt/global/core/chat/utils';
import { runToolWithFunctionCall } from './functionCall';
import { runToolWithPromptCall } from './promptCall';
import { getNanoid, replaceVariable } from '@fastgpt/global/common/string/tools';
import { replaceVariable } from '@fastgpt/global/common/string/tools';
import { getMultiplePrompt, Prompt_Tool_Call } from './constants';
import { filterToolResponseToPreview } from './utils';
import { type InteractiveNodeResponseType } from '@fastgpt/global/core/workflow/template/system/interactive/type';
@@ -32,6 +32,9 @@ import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
import { ModelTypeEnum } from '@fastgpt/global/core/ai/model';
import { getDocumentQuotePrompt } from '@fastgpt/global/core/ai/prompt/AIChat';
import { postTextCensor } from '../../../../chat/postTextCensor';
import type { FlowNodeInputItemType } from '@fastgpt/global/core/workflow/type/io';
import type { McpToolDataType } from '@fastgpt/global/core/app/mcpTools/type';
import type { JSONSchemaInputType } from '@fastgpt/global/core/app/jsonschema';
type Response = DispatchNodeResultType<{
[NodeOutputKeyEnum.answerText]: string;
@@ -78,10 +81,24 @@ export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<
})
.filter(Boolean)
.map<ToolNodeItemType>((tool) => {
const toolParams = tool?.inputs.filter((input) => !!input.toolDescription) || [];
const toolParams: FlowNodeInputItemType[] = [];
// Raw json schema(MCP tool)
let jsonSchema: JSONSchemaInputType | undefined = undefined;
tool?.inputs.forEach((input) => {
if (input.toolDescription) {
toolParams.push(input);
}
if (input.key === NodeInputKeyEnum.toolData || input.key === 'toolData') {
const value = input.value as McpToolDataType;
jsonSchema = value.inputSchema;
}
});
return {
...(tool as RuntimeNodeItemType),
toolParams
toolParams,
jsonSchema
};
});
@@ -172,28 +189,26 @@ export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<
reserveId: false
// reserveTool: !!toolModel.toolChoice
});
if (toolModel.toolChoice) {
return runToolWithToolChoice({
...props,
const requestParams = {
runtimeNodes,
runtimeEdges,
toolNodes,
toolModel,
maxRunToolTimes: 30,
messages: adaptMessages,
interactiveEntryToolParams: lastInteractive?.toolParams
};
if (toolModel.toolChoice) {
return runToolWithToolChoice({
...props,
...requestParams,
maxRunToolTimes: 30
});
}
if (toolModel.functionCall) {
return runToolWithFunctionCall({
...props,
runtimeNodes,
runtimeEdges,
toolNodes,
toolModel,
messages: adaptMessages,
interactiveEntryToolParams: lastInteractive?.toolParams
...requestParams
});
}
@@ -218,12 +233,7 @@ export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<
return runToolWithPromptCall({
...props,
runtimeNodes,
runtimeEdges,
toolNodes,
toolModel,
messages: adaptMessages,
interactiveEntryToolParams: lastInteractive?.toolParams
...requestParams
});
})();

View File

@@ -38,7 +38,7 @@ import {
parseLLMStreamResponse
} from '../../../../ai/utils';
import { type WorkflowResponseType } from '../../type';
import { toolValueTypeList } from '@fastgpt/global/core/workflow/constants';
import { toolValueTypeList, valueTypeJsonSchemaMap } from '@fastgpt/global/core/workflow/constants';
import { type WorkflowInteractiveResponseType } from '@fastgpt/global/core/workflow/template/system/interactive/type';
import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants';
@@ -166,6 +166,14 @@ export const runToolWithPromptCall = async (
const toolsPrompt = JSON.stringify(
toolNodes.map((item) => {
if (item.jsonSchema) {
return {
toolId: item.nodeId,
description: item.intro,
parameters: item.jsonSchema
};
}
const properties: Record<
string,
{
@@ -176,9 +184,9 @@ export const runToolWithPromptCall = async (
}
> = {};
item.toolParams.forEach((item) => {
const jsonSchema = (
toolValueTypeList.find((type) => type.value === item.valueType) || toolValueTypeList[0]
).jsonSchema;
const jsonSchema = item.valueType
? valueTypeJsonSchemaMap[item.valueType] || toolValueTypeList[0].jsonSchema
: toolValueTypeList[0].jsonSchema;
properties[item.key] = {
...jsonSchema,

View File

@@ -33,7 +33,7 @@ import {
parseLLMStreamResponse
} from '../../../../ai/utils';
import { getNanoid, sliceStrStartEnd } from '@fastgpt/global/common/string/tools';
import { toolValueTypeList } from '@fastgpt/global/core/workflow/constants';
import { toolValueTypeList, valueTypeJsonSchemaMap } from '@fastgpt/global/core/workflow/constants';
import { type WorkflowInteractiveResponseType } from '@fastgpt/global/core/workflow/template/system/interactive/type';
import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants';
import { getErrText } from '@fastgpt/global/common/error/utils';
@@ -211,6 +211,17 @@ export const runToolWithToolChoice = async (
const assistantResponses = response?.assistantResponses || [];
const tools: ChatCompletionTool[] = toolNodes.map((item) => {
if (item.jsonSchema) {
return {
type: 'function',
function: {
name: item.nodeId,
description: item.intro || item.name,
parameters: item.jsonSchema
}
};
}
const properties: Record<
string,
{
@@ -224,9 +235,10 @@ export const runToolWithToolChoice = async (
}
> = {};
item.toolParams.forEach((item) => {
const jsonSchema = (
toolValueTypeList.find((type) => type.value === item.valueType) || toolValueTypeList[0]
)?.jsonSchema;
const jsonSchema = item.valueType
? valueTypeJsonSchemaMap[item.valueType] || toolValueTypeList[0].jsonSchema
: toolValueTypeList[0].jsonSchema;
properties[item.key] = {
...jsonSchema,
description: item.toolDescription || '',

View File

@@ -16,6 +16,7 @@ import { ChatItemValueItemType } from '@fastgpt/global/core/chat/type';
import type { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
import type { WorkflowInteractiveResponseType } from '@fastgpt/global/core/workflow/template/system/interactive/type';
import type { LLMModelItemType } from '@fastgpt/global/core/ai/model';
import type { JSONSchemaInputType } from '@fastgpt/global/core/app/jsonschema';
export type DispatchToolModuleProps = ModuleDispatchProps<{
[NodeInputKeyEnum.history]?: ChatItemType[];
@@ -51,4 +52,5 @@ export type RunToolResponse = {
};
export type ToolNodeItemType = RuntimeNodeItemType & {
toolParams: RuntimeNodeItemType['inputs'];
jsonSchema?: JSONSchemaInputType;
};

View File

@@ -3,18 +3,16 @@ import {
type ModuleDispatchProps
} from '@fastgpt/global/core/workflow/runtime/type';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
import type { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import { NodeOutputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import { MCPClient } from '../../../app/mcp';
import { getErrText } from '@fastgpt/global/common/error/utils';
import { type StoreSecretValueType } from '@fastgpt/global/common/secret/type';
import { getSecretValue } from '../../../../common/secret/utils';
import type { McpToolDataType } from '@fastgpt/global/core/app/mcpTools/type';
type RunToolProps = ModuleDispatchProps<{
toolData: {
name: string;
url: string;
headerSecret: StoreSecretValueType;
};
toolData?: McpToolDataType;
[NodeInputKeyEnum.toolData]: McpToolDataType;
}>;
type RunToolResponse = DispatchNodeResultType<{
@@ -27,13 +25,13 @@ export const dispatchRunTool = async (props: RunToolProps): Promise<RunToolRespo
node: { avatar }
} = props;
const { toolData, ...restParams } = params;
const { name: toolName, url } = toolData;
const { toolData, system_toolData, ...restParams } = params;
const { name: toolName, url, headerSecret } = toolData || system_toolData;
const mcpClient = new MCPClient({
url,
headers: getSecretValue({
storeSecret: toolData.headerSecret
storeSecret: headerSecret
})
});
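The double read of toolData and system_toolData looks like backward compatibility: NodeInputKeyEnum.toolData is now 'system_toolData', but tool nodes saved before this change still carry the literal key 'toolData'. A minimal sketch of that fallback, with invented node params:

import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';

// A node saved before this change vs. one saved after it
const legacyParams = { toolData: { name: 'get_weather' } };
const currentParams = { [NodeInputKeyEnum.toolData]: { name: 'get_weather' } };

// Prefer whichever key is present; both resolve to the same tool config
const readToolData = (params: Record<string, unknown>) =>
  params['toolData'] ?? params[NodeInputKeyEnum.toolData];

readToolData(legacyParams); // -> { name: 'get_weather' }
readToolData(currentParams); // -> { name: 'get_weather' }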

View File

@@ -14,6 +14,7 @@ import { getNanoid } from '@fastgpt/global/common/string/tools';
import { type SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
import { getMCPToolRuntimeNode } from '@fastgpt/global/core/app/mcpTools/utils';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
import type { McpToolSetDataType } from '@fastgpt/global/core/app/mcpTools/type';
export const getWorkflowResponseWrite = ({
res,
@@ -161,11 +162,13 @@ export const rewriteRuntimeWorkFlow = (
for (const toolSetNode of toolSetNodes) {
nodeIdsToRemove.add(toolSetNode.nodeId);
const toolList =
toolSetNode.inputs.find((input) => input.key === 'toolSetData')?.value?.toolList || [];
const url = toolSetNode.inputs.find((input) => input.key === 'toolSetData')?.value?.url;
const headerSecret = toolSetNode.inputs.find((input) => input.key === 'toolSetData')?.value
?.headerSecret;
const toolSetValue = toolSetNode.inputs[0]?.value as McpToolSetDataType | undefined;
if (!toolSetValue) continue;
const toolList = toolSetValue.toolList;
const url = toolSetValue.url;
const headerSecret = toolSetValue.headerSecret;
const incomingEdges = edges.filter((edge) => edge.target === toolSetNode.nodeId);

View File

@@ -3,7 +3,7 @@
"version": "1.0.0",
"dependencies": {
"@fastgpt/global": "workspace:*",
"@modelcontextprotocol/sdk": "^1.10.2",
"@modelcontextprotocol/sdk": "^1.12.1",
"@node-rs/jieba": "2.0.1",
"@xmldom/xmldom": "^0.8.10",
"@zilliz/milvus2-sdk-node": "2.4.2",

View File

@@ -1,7 +1,10 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"baseUrl": "."
"baseUrl": ".",
"paths": {
"@fastgpt/servive/*": ["./*"]
}
},
"include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", "**/*.d.ts", "../../**/*.d.ts"]
}

View File

@@ -15,7 +15,7 @@
"channel_status_disabled": "Disabled",
"channel_status_enabled": "Enable",
"channel_status_unknown": "unknown",
"channel_type": "Manufacturer",
"channel_type": "Protocol Type",
"chart_mode_cumulative": "Cumulative",
"chart_mode_incremental": "Incremental",
"clear_model": "Clear the model",
@@ -74,7 +74,7 @@
"select_channel": "Select a channel name",
"select_model": "Select a model",
"select_model_placeholder": "Select the model available under this channel",
"select_provider_placeholder": "Search for manufacturers",
"select_provider_placeholder": "Search protocol type",
"selected_model_empty": "Choose at least one model",
"start_test": "Batch test {{num}} models",
"test_failed": "There are {{num}} models that report errors",

View File

@@ -15,7 +15,7 @@
"channel_status_disabled": "禁用",
"channel_status_enabled": "启用",
"channel_status_unknown": "未知",
"channel_type": "厂商",
"channel_type": "协议类型",
"chart_mode_cumulative": "累积",
"chart_mode_incremental": "分时",
"clear_model": "清空模型",
@@ -74,7 +74,7 @@
"select_channel": "选择渠道名",
"select_model": "选择模型",
"select_model_placeholder": "选择该渠道下可用的模型",
"select_provider_placeholder": "搜索厂商",
"select_provider_placeholder": "搜索协议类型",
"selected_model_empty": "至少选择一个模型",
"start_test": "批量测试{{num}}个模型",
"test_failed": "有{{num}}个模型报错",

View File

@@ -15,7 +15,7 @@
"channel_status_disabled": "停用",
"channel_status_enabled": "啟用",
"channel_status_unknown": "未知",
"channel_type": "廠商",
"channel_type": "協議類型",
"chart_mode_cumulative": "累積",
"chart_mode_incremental": "分時",
"clear_model": "清空模型",
@@ -74,7 +74,7 @@
"select_channel": "選擇管道名稱",
"select_model": "選擇模型",
"select_model_placeholder": "選擇該管道下可用的模型",
"select_provider_placeholder": "搜尋廠商",
"select_provider_placeholder": "搜索協議類型",
"selected_model_empty": "至少選擇一個模型",
"start_test": "批次測試{{num}}個模型",
"test_failed": "有{{num}}個模型報錯",

View File

@@ -1,7 +1,10 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"baseUrl": "."
"baseUrl": ".",
"paths": {
"@fastgpt/web/*": ["./*"]
}
},
"include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", "**/*.d.ts", "../**/*.d.ts"]
}

pnpm-lock.yaml (generated)
View File

@@ -182,8 +182,8 @@ importers:
specifier: workspace:*
version: link:../global
'@modelcontextprotocol/sdk':
specifier: ^1.10.2
version: 1.10.2
specifier: ^1.12.1
version: 1.12.2
'@node-rs/jieba':
specifier: 2.0.1
version: 2.0.1
@@ -511,8 +511,8 @@ importers:
specifier: ^3.0.6
version: 3.0.6
'@modelcontextprotocol/sdk':
specifier: ^1.10.2
version: 1.10.2
specifier: ^1.12.1
version: 1.12.2
'@node-rs/jieba':
specifier: 2.0.1
version: 2.0.1
@@ -695,8 +695,8 @@ importers:
projects/mcp_server:
dependencies:
'@modelcontextprotocol/sdk':
specifier: ^1.10.2
version: 1.10.2
specifier: ^1.12.1
version: 1.12.2
axios:
specifier: ^1.8.2
version: 1.8.4
@@ -2392,8 +2392,8 @@ packages:
'@mixmark-io/domino@2.2.0':
resolution: {integrity: sha512-Y28PR25bHXUg88kCV7nivXrP2Nj2RueZ3/l/jdx6J9f8J4nsEGcgX0Qe6lt7Pa+J79+kPiJU3LguR6O/6zrLOw==}
'@modelcontextprotocol/sdk@1.10.2':
resolution: {integrity: sha512-rb6AMp2DR4SN+kc6L1ta2NCpApyA9WYNx3CrTSZvGxq9wH71bRur+zRqPfg0vQ9mjywR7qZdX2RGHOPq3ss+tA==}
'@modelcontextprotocol/sdk@1.12.2':
resolution: {integrity: sha512-ShQesHTyTZfcpjnMCUOH1gbhK9CZXL30GLFw4hN8qOiIaRKbkOl91uw79WP4v3Mh4QUffjkUO60ZtBUEzaRaOg==}
engines: {node: '>=18'}
'@monaco-editor/loader@1.5.0':
@@ -11872,8 +11872,9 @@ snapshots:
'@mixmark-io/domino@2.2.0': {}
'@modelcontextprotocol/sdk@1.10.2':
'@modelcontextprotocol/sdk@1.12.2':
dependencies:
ajv: 6.12.6
content-type: 1.0.5
cors: 2.8.5
cross-spawn: 7.0.6

View File

@@ -24,7 +24,7 @@
"@fastgpt/templates": "workspace:*",
"@fastgpt/web": "workspace:*",
"@fortaine/fetch-event-source": "^3.0.6",
"@modelcontextprotocol/sdk": "^1.10.2",
"@modelcontextprotocol/sdk": "^1.12.1",
"@node-rs/jieba": "2.0.1",
"@tanstack/react-query": "^4.24.10",
"ahooks": "^3.7.11",

View File

@@ -137,7 +137,10 @@ const ChannelTable = ({ Tab }: { Tab: React.ReactNode }) => {
<Td>{item.name}</Td>
<Td>
<HStack>
<MyIcon name={provider?.avatar as any} w={'1rem'} />
<MyIcon
name={(providerData?.avatar || provider?.avatar) as any}
w={'1rem'}
/>
<Box>{t(providerData?.label as any)}</Box>
</HStack>
</Td>

View File

@@ -2,13 +2,11 @@ import { type FlowNodeItemType } from '@fastgpt/global/core/workflow/type/node';
import { useTranslation } from 'next-i18next';
import { type NodeProps } from 'reactflow';
import NodeCard from '../render/NodeCard';
import EmptyTip from '@fastgpt/web/components/common/EmptyTip';
import { useContextSelector } from 'use-context-selector';
import { WorkflowContext } from '../../../context';
import {
NodeInputKeyEnum,
NodeOutputKeyEnum,
toolValueTypeList,
WorkflowIOValueTypeEnum
} from '@fastgpt/global/core/workflow/constants';
import { Box, Flex, Table, TableContainer, Tbody, Td, Th, Thead, Tr } from '@chakra-ui/react';

View File

@@ -12,8 +12,7 @@ const NodeToolSet = ({ data, selected }: NodeProps<FlowNodeItemType>) => {
const { t } = useTranslation();
const { inputs } = data;
const toolList: McpToolConfigType[] = inputs.find((item) => item.key === 'toolSetData')?.value
?.toolList;
const toolList: McpToolConfigType[] = inputs[0]?.value?.toolList;
return (
<NodeCard minW={'350px'} selected={selected} {...data}>

View File

@@ -15,7 +15,6 @@ import { pushTrack } from '@fastgpt/service/common/middle/tracks/utils';
import { checkTeamAppLimit } from '@fastgpt/service/support/permission/teamLimit';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { type StoreSecretValueType } from '@fastgpt/global/common/secret/type';
import { encryptSecret } from '@fastgpt/service/common/secret/aes256gcm';
import { storeSecretValue } from '@fastgpt/service/common/secret/utils';
export type createMCPToolsQuery = {};

View File

@@ -6,7 +6,7 @@ import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { getAppLatestVersion } from '@fastgpt/service/core/app/version/controller';
import { type Tool } from '@modelcontextprotocol/sdk/types';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
import { toolValueTypeList } from '@fastgpt/global/core/workflow/constants';
import { toolValueTypeList, valueTypeJsonSchemaMap } from '@fastgpt/global/core/workflow/constants';
import { type AppChatConfigType } from '@fastgpt/global/core/app/type';
import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
import { type FlowNodeInputItemType } from '@fastgpt/global/core/workflow/type/io';
@@ -51,9 +51,9 @@ export const pluginNodes2InputSchema = (
};
pluginInput?.inputs.forEach((input) => {
const jsonSchema = (
toolValueTypeList.find((type) => type.value === input.valueType) || toolValueTypeList[0]
)?.jsonSchema;
const jsonSchema = input.valueType
? valueTypeJsonSchemaMap[input.valueType] || toolValueTypeList[0].jsonSchema
: toolValueTypeList[0].jsonSchema;
schema.properties![input.key] = {
...jsonSchema,
@@ -96,9 +96,9 @@ export const workflow2InputSchema = (chatConfig?: {
};
chatConfig?.variables?.forEach((item) => {
const jsonSchema = (
toolValueTypeList.find((type) => type.value === item.valueType) || toolValueTypeList[0]
)?.jsonSchema;
const jsonSchema = item.valueType
? valueTypeJsonSchemaMap[item.valueType] || toolValueTypeList[0].jsonSchema
: toolValueTypeList[0].jsonSchema;
schema.properties![item.key] = {
...jsonSchema,

View File

@@ -110,6 +110,15 @@ export const getChannelList = () =>
GET<ChannelListResponseType>('/channels/all', {
page: 1,
perPage: 10
}).then((res) => {
res.sort((a, b) => {
if (a.status !== b.status) {
return a.status - b.status;
}
return b.priority - a.priority;
});
console.log(res);
return res;
});
export const getChannelProviders = () =>

View File

@@ -14,7 +14,7 @@
"mcp_test": "npx @modelcontextprotocol/inspector"
},
"dependencies": {
"@modelcontextprotocol/sdk": "^1.10.2",
"@modelcontextprotocol/sdk": "^1.12.1",
"axios": "^1.8.2",
"chalk": "^5.3.0",
"dayjs": "^1.11.7",

View File

@@ -0,0 +1,122 @@
import { describe, expect, it } from 'vitest';
import type { JSONSchemaInputType } from '@fastgpt/global/core/app/jsonschema';
import { jsonSchema2NodeInput } from '@fastgpt/global/core/app/jsonschema';
describe('jsonSchema2NodeInput', () => {
it('should return correct node input', () => {
const jsonSchema: JSONSchemaInputType = {
type: 'object',
properties: {
name: { type: 'string' },
select: { type: 'string', enum: ['11', '22'] },
age: { type: 'number', minimum: 0, maximum: 100 },
boolean: { type: 'boolean' },
object: { type: 'object' },
strArr: { type: 'array', items: { type: 'string' } },
numArr: { type: 'array', items: { type: 'number' } },
boolArr: { type: 'array', items: { type: 'boolean' } },
objArr: { type: 'array', items: { type: 'object' } },
anyArr: { type: 'array', items: { type: 'array' } }
},
required: ['name', 'age']
};
const expectResponse = [
{
key: 'name',
label: 'name',
valueType: 'string',
toolDescription: 'name',
required: true,
renderTypeList: ['input']
},
{
key: 'select',
label: 'select',
valueType: 'string',
toolDescription: 'select',
required: false,
value: '11',
renderTypeList: ['select'],
list: [
{
label: '11',
value: '11'
},
{
label: '22',
value: '22'
}
]
},
{
key: 'age',
label: 'age',
valueType: 'number',
toolDescription: 'age',
required: true,
renderTypeList: ['numberInput'],
max: 100,
min: 0
},
{
key: 'boolean',
label: 'boolean',
valueType: 'boolean',
toolDescription: 'boolean',
required: false,
renderTypeList: ['switch']
},
{
key: 'object',
label: 'object',
valueType: 'object',
toolDescription: 'object',
required: false,
renderTypeList: ['JSONEditor']
},
{
key: 'strArr',
label: 'strArr',
valueType: 'arrayString',
toolDescription: 'strArr',
required: false,
renderTypeList: ['JSONEditor']
},
{
key: 'numArr',
label: 'numArr',
valueType: 'arrayNumber',
toolDescription: 'numArr',
required: false,
renderTypeList: ['JSONEditor']
},
{
key: 'boolArr',
label: 'boolArr',
valueType: 'arrayBoolean',
toolDescription: 'boolArr',
required: false,
renderTypeList: ['JSONEditor']
},
{
key: 'objArr',
label: 'objArr',
valueType: 'arrayObject',
toolDescription: 'objArr',
required: false,
renderTypeList: ['JSONEditor']
},
{
key: 'anyArr',
label: 'anyArr',
valueType: 'arrayAny',
toolDescription: 'anyArr',
required: false,
renderTypeList: ['JSONEditor']
}
];
const result = jsonSchema2NodeInput(jsonSchema);
expect(result).toEqual(expectResponse);
});
});

View File

@@ -1,293 +0,0 @@
import { describe, expect, it, vi, beforeEach } from 'vitest';
import {
GET,
POST,
PUT,
DELETE,
responseSuccess,
checkRes,
responseError,
getChannelList,
getChannelProviders,
postCreateChannel,
putChannelStatus,
putChannel,
deleteChannel,
getChannelLog,
getLogDetail,
getDashboardV2,
instance
} from '@/web/core/ai/channel';
import axios from 'axios';
vi.mock('axios', () => ({
default: {
create: vi.fn(() => ({
request: vi.fn(),
interceptors: {
response: {
use: vi.fn()
}
}
}))
}
}));
vi.mock('@fastgpt/web/common/system/utils', () => ({
getWebReqUrl: () => 'http://localhost:3000'
}));
describe('channel api', () => {
const mockAxiosInstance = axios.create();
beforeEach(() => {
vi.clearAllMocks();
});
it('should handle successful response', () => {
const response = {
data: {
success: true,
data: { test: 'data' }
}
};
expect(responseSuccess(response)).toBe(response);
});
it('should check response data', async () => {
const validData = {
success: true,
message: 'ok',
data: { test: 'data' }
};
await expect(Promise.resolve(checkRes(validData))).resolves.toEqual({ test: 'data' });
const invalidData = {
success: false,
message: 'error'
};
await expect(Promise.resolve(checkRes(invalidData))).rejects.toEqual(invalidData);
await expect(Promise.resolve(checkRes(undefined))).rejects.toBe('服务器异常');
});
it('should handle response error', async () => {
await expect(responseError('test error')).rejects.toEqual({
message: 'test error'
});
await expect(responseError({ response: { data: 'error data' } })).rejects.toBe('error data');
await expect(responseError(undefined)).rejects.toEqual({
message: '未知错误'
});
});
it('should make GET request', async () => {
vi.spyOn(instance, 'request').mockResolvedValueOnce({
data: {
success: true,
data: { result: 'test' }
}
});
const result = await GET('/test');
expect(result).toEqual({ result: 'test' });
});
it('should make POST request', async () => {
vi.spyOn(instance, 'request').mockResolvedValueOnce({
data: {
success: true,
data: { id: 1 }
}
});
const result = await POST('/test', { name: 'test' });
expect(result).toEqual({ id: 1 });
});
it('should get channel list', async () => {
vi.spyOn(instance, 'request').mockResolvedValueOnce({
data: {
success: true,
data: { channels: [], total: 0 }
}
});
const result = await getChannelList();
expect(result).toEqual({ channels: [], total: 0 });
});
it('should get channel providers', async () => {
const mockProviders = {
1: {
defaultBaseUrl: 'test.com',
keyHelp: 'help',
name: 'Test Provider'
}
};
vi.spyOn(instance, 'request').mockResolvedValueOnce({
data: {
success: true,
data: mockProviders
}
});
const result = await getChannelProviders();
expect(result).toEqual(mockProviders);
});
it('should create channel', async () => {
vi.spyOn(instance, 'request').mockResolvedValueOnce({
data: {
success: true,
data: { id: 1 }
}
});
const result = await postCreateChannel({
type: 1,
name: 'test',
base_url: 'test.com',
models: [],
model_mapping: {},
key: 'key'
});
expect(result).toEqual({ id: 1 });
});
it('should update channel status', async () => {
vi.spyOn(instance, 'request').mockResolvedValueOnce({
data: {
success: true,
data: true
}
});
const result = await putChannelStatus(1, 'active');
expect(result).toBe(true);
});
it('should update channel', async () => {
vi.spyOn(instance, 'request').mockResolvedValueOnce({
data: {
success: true,
data: { id: 1 }
}
});
const result = await putChannel({
id: 1,
type: 1,
name: 'test',
base_url: 'test.com',
models: [],
model_mapping: {},
key: 'key',
status: 'active'
});
expect(result).toEqual({ id: 1 });
});
it('should delete channel', async () => {
vi.spyOn(instance, 'request').mockResolvedValueOnce({
data: {
success: true,
data: true
}
});
const result = await deleteChannel(1);
expect(result).toBe(true);
});
it('should get channel logs', async () => {
vi.spyOn(instance, 'request').mockResolvedValueOnce({
data: {
success: true,
data: {
logs: [],
total: 0
}
}
});
const result = await getChannelLog({
start_timestamp: 0,
end_timestamp: 1000,
offset: 0,
pageSize: 10
});
expect(result).toEqual({
list: [],
total: 0
});
});
it('should get log detail', async () => {
vi.spyOn(instance, 'request').mockResolvedValueOnce({
data: {
success: true,
data: {
request_body: '{}',
response_body: '{}'
}
}
});
const result = await getLogDetail(1);
expect(result).toEqual({
request_body: '{}',
response_body: '{}'
});
});
it('should get dashboard data', async () => {
vi.spyOn(instance, 'request').mockResolvedValueOnce({
data: {
success: true,
data: [
{
timestamp: 1000,
summary: [
{
model: 'gpt-3.5',
total_tokens: 100,
request_count: 10,
total_price: 0.002
}
]
}
]
}
});
const result = await getDashboardV2({
channel: 1,
model: 'gpt-3.5',
start_timestamp: 0,
end_timestamp: 1000,
timezone: 'UTC',
timespan: 'minute'
});
expect(result).toEqual([
{
timestamp: 1000,
summary: [
{
model: 'gpt-3.5',
total_tokens: 100,
request_count: 10,
total_price: 0.002
}
]
}
]);
});
});