feature: V4.11.1 (#5350)

* perf: system toolset & mcp (#5200)

* feat: support system toolset

* fix: type

* fix: system tool config

* chore: mcptool config migrate

* refactor: mcp toolset

* fix: fe type error

* fix: type error

* fix: show version

* chore: support extract tool's secretInputConfig out of inputs

* chore: compatible with old version mcp

* chore: adjust

* deps: update dependency @fastgpt-sdk/plugin

* fix: version

* fix: some bug (#5316)

* chore: compatible with old version mcp

* fix: version

* fix: compatible bug

* fix: mcp object params

* fix: type error

* chore: update test cases

* chore: remove log

* fix: toolset node name

* optimize app logs sort (#5310)

* log keys config modal

* multiple select

* api

* fontsize

* code

* chatid

* fix build

* fix

* fix component

* change name

* log keys config

* fix

* delete unused

* fix

* perf: log code

* perf: send auth code modal enter press

* fix log (#5328)

* perf: mcp toolset comment

* perf: log ui

* remove log (#5347)

* doc

* fix: action

* remove log

* fix: Table Optimization (#5319)

* feat: table test: 1

* feat: table test: 2

* feat: table test: 3

* feat: table test: 4

* feat: table test : 5 把maxSize改回chunkSize

* feat: table test : 6 都删了,只看maxSize

* feat: table test : 7 恢复初始,接下来删除标签功能

* feat: table test : 8 删除标签功能

* feat: table test : 9 删除标签功能成功

* feat: table test : 10 继续调试,修改trainingStates

* feat: table test : 11 修改第一步

* feat: table test : 12 修改第二步

* feat: table test : 13 修改了HtmlTable2Md

* feat: table test : 14 修改表头分块规则

* feat: table test : 15 前面表格分的太细了

* feat: table test : 16 改着改着表头又不加了

* feat: table test : 17 用CUSTOM_SPLIT_SIGN不行,重新改

* feat: table test : 18 表头仍然还会多加,但现在分块搞的合理了终于

* feat: table test : 19 还是需要搞好表头问题,先保存一下调试情况

* feat: table test : 20 调试结束,看一下replace有没有问题,没问题就pr

* feat: table test : 21 先把注释删了

* feat: table test : 21 注释replace都改了,下面切main分支看看情况

* feat: table test : 22 修改旧文件

* feat: table test : 23 修改测试文件

* feat: table test : 24 xlsx表格处理

* feat: table test : 25 刚才没保存先com了

* feat: table test : 26 fix

* feat: table test : 27 先com一版调试

* feat: table test : 28 试试放format2csv里

* feat: table test : 29 xlsx解决

* feat: table test : 30 tablesplit解决

* feat: table test : 31

* feat: table test : 32

* perf: table split

* perf: mcp old version compatibility (#5342)

* fix: system-tool secret inputs

* fix: rewrite runtime node i18n for system tool

* perf: mcp old version compatibility

* fix: splitPluginId

* fix: old mcp toolId

* fix: filter secret key

* feat: support system toolset activation

* chore: remove log

* perf: mcp update

* perf: rewrite toolset

* fix: delete variable id (#5335)

* perf: variable update

* fix: multiple select ui

* perf: model config move to plugin

* fix: var conflict

* perf: variable checker

* Avoid empty number

* update doc time

* fix: test

* fix: mcp object

* update count app

* update count app

---------

Co-authored-by: Finley Ge <32237950+FinleyGe@users.noreply.github.com>
Co-authored-by: heheer <heheer@sealos.io>
Co-authored-by: heheer <zhiyu44@qq.com>
Co-authored-by: colnii <1286949794@qq.com>
Co-authored-by: dreamer6680 <1468683855@qq.com>
This commit is contained in:
Archer
2025-08-01 16:08:20 +08:00
committed by GitHub
parent e0c21a949c
commit e25d7efb5b
143 changed files with 2596 additions and 4177 deletions
@@ -1,10 +0,0 @@
{
"provider": "AliCloud",
"list": [
{
"model": "SenseVoiceSmall",
"name": "SenseVoiceSmall",
"type": "stt"
}
]
}
@@ -1,17 +0,0 @@
{
"provider": "BAAI",
"list": [
{
"model": "bge-m3",
"name": "bge-m3",
"defaultToken": 512,
"maxToken": 8000,
"type": "embedding"
},
{
"model": "bge-reranker-v2-m3",
"name": "bge-reranker-v2-m3",
"type": "rerank"
}
]
}
@@ -1,4 +0,0 @@
{
"provider": "Baichuan",
"list": []
}
@@ -1,121 +0,0 @@
{
"provider": "ChatGLM",
"list": [
{
"model": "glm-4-air",
"name": "glm-4-air",
"maxContext": 128000,
"maxResponse": 4000,
"quoteMaxToken": 120000,
"maxTemperature": 0.99,
"showTopP": true,
"responseFormatList": ["text", "json_object"],
"showStopSign": true,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "glm-4-flash",
"name": "glm-4-flash",
"maxContext": 128000,
"maxResponse": 4000,
"quoteMaxToken": 120000,
"maxTemperature": 0.99,
"showTopP": true,
"responseFormatList": ["text", "json_object"],
"showStopSign": true,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "glm-4-long",
"name": "glm-4-long",
"maxContext": 1000000,
"maxResponse": 4000,
"quoteMaxToken": 900000,
"maxTemperature": 0.99,
"showTopP": true,
"responseFormatList": ["text", "json_object"],
"showStopSign": true,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "glm-4-plus",
"name": "GLM-4-plus",
"maxContext": 128000,
"maxResponse": 4000,
"quoteMaxToken": 120000,
"maxTemperature": 0.99,
"showTopP": true,
"responseFormatList": ["text", "json_object"],
"showStopSign": true,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "glm-4v-flash",
"name": "glm-4v-flash",
"maxContext": 8000,
"maxResponse": 1000,
"quoteMaxToken": 6000,
"maxTemperature": 0.99,
"showTopP": true,
"showStopSign": true,
"vision": true,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "glm-4v-plus",
"name": "GLM-4v-plus",
"maxContext": 8000,
"maxResponse": 1000,
"quoteMaxToken": 6000,
"maxTemperature": 0.99,
"showTopP": true,
"showStopSign": true,
"vision": true,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "embedding-3",
"name": "embedding-3",
"defaultToken": 512,
"maxToken": 8000,
"defaultConfig": {
"dimensions": 1024
},
"type": "embedding"
}
]
}
@@ -1,124 +0,0 @@
{
"provider": "Claude",
"list": [
{
"model": "claude-sonnet-4-20250514",
"name": "claude-sonnet-4-20250514",
"maxContext": 200000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"showTopP": true,
"showStopSign": true,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "claude-opus-4-20250514",
"name": "claude-opus-4-20250514",
"maxContext": 200000,
"maxResponse": 4096,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"showTopP": true,
"showStopSign": true,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "claude-3-7-sonnet-20250219",
"name": "claude-3-7-sonnet-20250219",
"maxContext": 200000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"showTopP": true,
"showStopSign": true,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "claude-3-5-haiku-20241022",
"name": "claude-3-5-haiku-20241022",
"maxContext": 200000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"showTopP": true,
"showStopSign": true,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "claude-3-5-sonnet-20240620",
"name": "Claude-3-5-sonnet-20240620",
"maxContext": 200000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"showTopP": true,
"showStopSign": true,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "claude-3-5-sonnet-20241022",
"name": "Claude-3-5-sonnet-20241022",
"maxContext": 200000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"showTopP": true,
"showStopSign": true,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "claude-3-opus-20240229",
"name": "claude-3-opus-20240229",
"maxContext": 200000,
"maxResponse": 4096,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"showTopP": true,
"showStopSign": true,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
}
]
}
@@ -1,39 +0,0 @@
{
"provider": "DeepSeek",
"list": [
{
"model": "deepseek-chat",
"name": "Deepseek-chat",
"maxContext": 64000,
"maxResponse": 8000,
"quoteMaxToken": 60000,
"maxTemperature": 1,
"showTopP": true,
"responseFormatList": ["text", "json_object"],
"showStopSign": true,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"type": "llm"
},
{
"model": "deepseek-reasoner",
"name": "Deepseek-reasoner",
"maxContext": 64000,
"maxResponse": 8000,
"quoteMaxToken": 60000,
"maxTemperature": null,
"vision": false,
"reasoning": true,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": false,
"showStopSign": false
}
]
}
@@ -1,276 +0,0 @@
{
"provider": "Doubao",
"list": [
{
"model": "doubao-seed-1-6-250615",
"name": "doubao-seed-1-6-250615",
"maxContext": 220000,
"maxResponse": 16000,
"quoteMaxToken": 220000,
"maxTemperature": 1,
"showTopP": true,
"showStopSign": true,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "doubao-seed-1-6-flash-250615",
"name": "doubao-seed-1-6-flash-250615",
"maxContext": 220000,
"maxResponse": 16000,
"quoteMaxToken": 220000,
"maxTemperature": 1,
"showTopP": true,
"showStopSign": true,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "doubao-seed-1-6-thinking-250615",
"name": "doubao-seed-1-6-thinking-250615",
"maxContext": 220000,
"maxResponse": 16000,
"quoteMaxToken": 220000,
"maxTemperature": 1,
"showTopP": true,
"showStopSign": true,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "Doubao-1.5-lite-32k",
"name": "Doubao-1.5-lite-32k",
"maxContext": 32000,
"maxResponse": 4000,
"quoteMaxToken": 32000,
"maxTemperature": 1,
"showTopP": true,
"showStopSign": true,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "Doubao-1.5-pro-32k",
"name": "Doubao-1.5-pro-32k",
"maxContext": 32000,
"maxResponse": 4000,
"quoteMaxToken": 32000,
"maxTemperature": 1,
"showTopP": true,
"showStopSign": true,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "Doubao-1.5-pro-256k",
"name": "Doubao-1.5-pro-256k",
"maxContext": 256000,
"maxResponse": 12000,
"quoteMaxToken": 256000,
"maxTemperature": 1,
"showTopP": true,
"showStopSign": true,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "Doubao-1.5-vision-pro-32k",
"name": "Doubao-1.5-vision-pro-32k",
"maxContext": 32000,
"maxResponse": 4000,
"quoteMaxToken": 32000,
"maxTemperature": 1,
"showTopP": true,
"showStopSign": true,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "Doubao-lite-4k",
"name": "Doubao-lite-4k",
"maxContext": 4000,
"maxResponse": 4000,
"quoteMaxToken": 4000,
"maxTemperature": 1,
"showTopP": true,
"showStopSign": true,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "Doubao-lite-32k",
"name": "Doubao-lite-32k",
"maxContext": 32000,
"maxResponse": 4000,
"quoteMaxToken": 32000,
"maxTemperature": 1,
"showTopP": true,
"showStopSign": true,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "Doubao-lite-128k",
"name": "Doubao-lite-128k",
"maxContext": 128000,
"maxResponse": 4000,
"quoteMaxToken": 120000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "Doubao-vision-lite-32k",
"name": "Doubao-vision-lite-32k",
"maxContext": 32000,
"maxResponse": 4000,
"quoteMaxToken": 32000,
"maxTemperature": 1,
"vision": true,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "Doubao-pro-4k",
"name": "Doubao-pro-4k",
"maxContext": 4000,
"maxResponse": 4000,
"quoteMaxToken": 4000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "Doubao-pro-32k",
"name": "Doubao-pro-32k",
"maxContext": 32000,
"maxResponse": 4000,
"quoteMaxToken": 32000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "Doubao-pro-128k",
"name": "Doubao-pro-128k",
"maxContext": 128000,
"maxResponse": 4000,
"quoteMaxToken": 120000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "Doubao-vision-pro-32k",
"name": "Doubao-vision-pro-32k",
"maxContext": 32000,
"maxResponse": 4000,
"quoteMaxToken": 32000,
"maxTemperature": 1,
"vision": true,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "Doubao-embedding-large",
"name": "Doubao-embedding-large",
"defaultToken": 512,
"maxToken": 4096,
"type": "embedding",
"normalization": true
},
{
"model": "Doubao-embedding",
"name": "Doubao-embedding",
"defaultToken": 512,
"maxToken": 4096,
"type": "embedding",
"normalization": true
}
]
}
@@ -1,87 +0,0 @@
{
"provider": "Ernie",
"list": [
{
"model": "ERNIE-4.0-8K",
"name": "ERNIE-4.0-8K",
"maxContext": 8000,
"maxResponse": 2048,
"quoteMaxToken": 5000,
"maxTemperature": 1,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "ERNIE-4.0-Turbo-8K",
"name": "ERNIE-4.0-Turbo-8K",
"maxContext": 8000,
"maxResponse": 2048,
"quoteMaxToken": 5000,
"maxTemperature": 1,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "ERNIE-Lite-8K",
"name": "ERNIE-lite-8k",
"maxContext": 8000,
"maxResponse": 2048,
"quoteMaxToken": 6000,
"maxTemperature": 1,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "ERNIE-Speed-128K",
"name": "ERNIE-Speed-128K",
"maxContext": 128000,
"maxResponse": 4096,
"quoteMaxToken": 120000,
"maxTemperature": 1,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "Embedding-V1",
"name": "Embedding-V1",
"defaultToken": 512,
"maxToken": 1000,
"type": "embedding"
},
{
"model": "tao-8k",
"name": "tao-8k",
"defaultToken": 512,
"maxToken": 8000,
"type": "embedding"
}
]
}
@@ -1,4 +0,0 @@
{
"provider": "FishAudio",
"list": []
}
@@ -1,214 +0,0 @@
{
"provider": "Gemini",
"list": [
{
"model": "gemini-2.5-pro",
"name": "gemini-2.5-pro",
"maxContext": 1000000,
"maxResponse": 63000,
"quoteMaxToken": 1000000,
"maxTemperature": 1,
"vision": true,
"toolChoice": true,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "gemini-2.5-flash",
"name": "gemini-2.5-flash",
"maxContext": 1000000,
"maxResponse": 63000,
"quoteMaxToken": 1000000,
"maxTemperature": 1,
"vision": true,
"toolChoice": true,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "gemini-2.5-pro-exp-03-25",
"name": "gemini-2.5-pro-exp-03-25",
"maxContext": 1000000,
"maxResponse": 63000,
"quoteMaxToken": 1000000,
"maxTemperature": 1,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "gemini-2.5-flash-preview-04-17",
"name": "gemini-2.5-flash-preview-04-17",
"maxContext": 1000000,
"maxResponse": 8000,
"quoteMaxToken": 60000,
"maxTemperature": 1,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "gemini-2.0-flash",
"name": "gemini-2.0-flash",
"maxContext": 1000000,
"maxResponse": 8000,
"quoteMaxToken": 60000,
"maxTemperature": 1,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "gemini-2.0-pro-exp",
"name": "gemini-2.0-pro-exp",
"maxContext": 2000000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "gemini-1.5-flash",
"name": "gemini-1.5-flash",
"maxContext": 1000000,
"maxResponse": 8000,
"quoteMaxToken": 60000,
"maxTemperature": 1,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "gemini-1.5-pro",
"name": "gemini-1.5-pro",
"maxContext": 2000000,
"maxResponse": 8000,
"quoteMaxToken": 60000,
"maxTemperature": 1,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "gemini-2.0-flash-exp",
"name": "gemini-2.0-flash-exp",
"maxContext": 1000000,
"maxResponse": 8000,
"quoteMaxToken": 60000,
"maxTemperature": 1,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "gemini-2.0-flash-thinking-exp-1219",
"name": "gemini-2.0-flash-thinking-exp-1219",
"maxContext": 1000000,
"maxResponse": 8000,
"quoteMaxToken": 60000,
"maxTemperature": 1,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "gemini-2.0-flash-thinking-exp-01-21",
"name": "gemini-2.0-flash-thinking-exp-01-21",
"maxContext": 1000000,
"maxResponse": 8000,
"quoteMaxToken": 60000,
"maxTemperature": 1,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "gemini-exp-1206",
"name": "gemini-exp-1206",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 120000,
"maxTemperature": 1,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "text-embedding-004",
"name": "text-embedding-004",
"defaultToken": 512,
"maxToken": 2000,
"type": "embedding"
}
]
}
@@ -1,105 +0,0 @@
{
"provider": "Grok",
"list": [
{
"model": "grok-4",
"name": "grok-4",
"maxContext": 256000,
"maxResponse": 8000,
"quoteMaxToken": 128000,
"maxTemperature": 1,
"showTopP": true,
"showStopSign": true,
"vision": true,
"toolChoice": true,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "grok-4-0709",
"name": "grok-4-0709",
"maxContext": 256000,
"maxResponse": 8000,
"quoteMaxToken": 128000,
"maxTemperature": 1,
"showTopP": true,
"showStopSign": true,
"vision": true,
"toolChoice": true,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "grok-3-mini",
"name": "grok-3-mini",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 128000,
"maxTemperature": 1,
"showTopP": true,
"showStopSign": true,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "grok-3-mini-fast",
"name": "grok-3-mini-fast",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 128000,
"maxTemperature": 1,
"showTopP": true,
"showStopSign": true,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "grok-3",
"name": "grok-3",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 128000,
"maxTemperature": 1,
"showTopP": true,
"showStopSign": true,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "grok-3-fast",
"name": "grok-3-fast",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 128000,
"maxTemperature": 1,
"showTopP": true,
"showStopSign": true,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
}
]
}
@@ -1,37 +0,0 @@
{
"provider": "Groq",
"list": [
{
"model": "llama-3.1-8b-instant",
"name": "Groq-llama-3.1-8b-instant",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 60000,
"maxTemperature": 1.2,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "llama-3.3-70b-versatile",
"name": "Groq-llama-3.3-70b-versatile",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 60000,
"maxTemperature": 1.2,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
}
]
}
@@ -1,131 +0,0 @@
{
"provider": "Hunyuan",
"list": [
{
"model": "hunyuan-large",
"name": "hunyuan-large",
"maxContext": 28000,
"maxResponse": 4000,
"quoteMaxToken": 20000,
"maxTemperature": 1,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "hunyuan-lite",
"name": "hunyuan-lite",
"maxContext": 250000,
"maxResponse": 6000,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "hunyuan-pro",
"name": "hunyuan-pro",
"maxContext": 28000,
"maxResponse": 4000,
"quoteMaxToken": 28000,
"maxTemperature": 1,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "hunyuan-standard",
"name": "hunyuan-standard",
"maxContext": 32000,
"maxResponse": 2000,
"quoteMaxToken": 20000,
"maxTemperature": 1,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "hunyuan-turbo-vision",
"name": "hunyuan-turbo-vision",
"maxContext": 6000,
"maxResponse": 2000,
"quoteMaxToken": 6000,
"maxTemperature": 1,
"vision": true,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "hunyuan-turbo",
"name": "hunyuan-turbo",
"maxContext": 28000,
"maxResponse": 4000,
"quoteMaxToken": 20000,
"maxTemperature": 1,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "hunyuan-vision",
"name": "hunyuan-vision",
"maxContext": 6000,
"maxResponse": 2000,
"quoteMaxToken": 4000,
"maxTemperature": 1,
"vision": true,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "hunyuan-embedding",
"name": "hunyuan-embedding",
"defaultToken": 512,
"maxToken": 1024,
"type": "embedding"
}
]
}
@@ -1,39 +0,0 @@
{
"provider": "Intern",
"list": [
{
"model": "internlm2-pro-chat",
"name": "internlm2-pro-chat",
"maxContext": 32000,
"maxResponse": 8000,
"quoteMaxToken": 32000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "internlm3-8b-instruct",
"name": "internlm3-8b-instruct",
"maxContext": 32000,
"maxResponse": 8000,
"quoteMaxToken": 32000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
}
]
}
@@ -1,4 +0,0 @@
{
"provider": "Meta",
"list": []
}
@@ -1,230 +0,0 @@
{
"provider": "MiniMax",
"list": [
{
"model": "MiniMax-Text-01",
"name": "MiniMax-Text-01",
"maxContext": 1000000,
"maxResponse": 1000000,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "abab6.5s-chat",
"name": "MiniMax-abab6.5s",
"maxContext": 245000,
"maxResponse": 10000,
"quoteMaxToken": 240000,
"maxTemperature": 1,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "speech-01-turbo",
"name": "speech-01-turbo",
"voices": [
{
"label": "male-qn-qingse",
"value": "male-qn-qingse"
},
{
"label": "male-qn-jingying",
"value": "male-qn-jingying"
},
{
"label": "male-qn-badao",
"value": "male-qn-badao"
},
{
"label": "male-qn-daxuesheng",
"value": "male-qn-daxuesheng"
},
{
"label": "female-shaonv",
"value": "female-shaonv"
},
{
"label": "female-yujie",
"value": "female-yujie"
},
{
"label": "female-chengshu",
"value": "female-chengshu"
},
{
"label": "female-tianmei",
"value": "female-tianmei"
},
{
"label": "presenter_male",
"value": "presenter_male"
},
{
"label": "presenter_female",
"value": "presenter_female"
},
{
"label": "audiobook_male_1",
"value": "audiobook_male_1"
},
{
"label": "audiobook_male_2",
"value": "audiobook_male_2"
},
{
"label": "audiobook_female_1",
"value": "audiobook_female_1"
},
{
"label": "audiobook_female_2",
"value": "audiobook_female_2"
},
{
"label": "male-qn-qingse-jingpin",
"value": "male-qn-qingse-jingpin"
},
{
"label": "male-qn-jingying-jingpin",
"value": "male-qn-jingying-jingpin"
},
{
"label": "male-qn-badao-jingpin",
"value": "male-qn-badao-jingpin"
},
{
"label": "male-qn-daxuesheng-jingpin",
"value": "male-qn-daxuesheng-jingpin"
},
{
"label": "female-shaonv-jingpin",
"value": "female-shaonv-jingpin"
},
{
"label": "female-yujie-jingpin",
"value": "female-yujie-jingpin"
},
{
"label": "female-chengshu-jingpin",
"value": "female-chengshu-jingpin"
},
{
"label": "female-tianmei-jingpin",
"value": "female-tianmei-jingpin"
},
{
"label": "clever_boy",
"value": "clever_boy"
},
{
"label": "cute_boy",
"value": "cute_boy"
},
{
"label": "lovely_girl",
"value": "lovely_girl"
},
{
"label": "cartoon_pig",
"value": "cartoon_pig"
},
{
"label": "bingjiao_didi",
"value": "bingjiao_didi"
},
{
"label": "junlang_nanyou",
"value": "junlang_nanyou"
},
{
"label": "chunzhen_xuedi",
"value": "chunzhen_xuedi"
},
{
"label": "lengdan_xiongzhang",
"value": "lengdan_xiongzhang"
},
{
"label": "badao_shaoye",
"value": "badao_shaoye"
},
{
"label": "tianxin_xiaoling",
"value": "tianxin_xiaoling"
},
{
"label": "qiaopi_mengmei",
"value": "qiaopi_mengmei"
},
{
"label": "wumei_yujie",
"value": "wumei_yujie"
},
{
"label": "diadia_xuemei",
"value": "diadia_xuemei"
},
{
"label": "danya_xuejie",
"value": "danya_xuejie"
},
{
"label": "Santa_Claus",
"value": "Santa_Claus"
},
{
"label": "Grinch",
"value": "Grinch"
},
{
"label": "Rudolph",
"value": "Rudolph"
},
{
"label": "Arnold",
"value": "Arnold"
},
{
"label": "Charming_Santa",
"value": "Charming_Santa"
},
{
"label": "Charming_Lady",
"value": "Charming_Lady"
},
{
"label": "Sweet_Girl",
"value": "Sweet_Girl"
},
{
"label": "Cute_Elf",
"value": "Cute_Elf"
},
{
"label": "Attractive_Girl",
"value": "Attractive_Girl"
},
{
"label": "Serene_Woman",
"value": "Serene_Woman"
}
],
"type": "tts"
}
]
}
@@ -1,73 +0,0 @@
{
"provider": "MistralAI",
"list": [
{
"model": "ministral-3b-latest",
"name": "Ministral-3b-latest",
"maxContext": 130000,
"maxResponse": 8000,
"quoteMaxToken": 60000,
"maxTemperature": 1.2,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "ministral-8b-latest",
"name": "Ministral-8b-latest",
"maxContext": 130000,
"maxResponse": 8000,
"quoteMaxToken": 60000,
"maxTemperature": 1.2,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "mistral-large-latest",
"name": "Mistral-large-latest",
"maxContext": 130000,
"maxResponse": 8000,
"quoteMaxToken": 60000,
"maxTemperature": 1.2,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "mistral-small-latest",
"name": "Mistral-small-latest",
"maxContext": 32000,
"maxResponse": 4000,
"quoteMaxToken": 32000,
"maxTemperature": 1.2,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
}
]
}
@@ -1,4 +0,0 @@
{
"provider": "Moka",
"list": []
}
@@ -1,181 +0,0 @@
{
"provider": "Moonshot",
"list": [
{
"model": "kimi-k2-0711-preview",
"name": "kimi-k2-0711-preview",
"maxContext": 128000,
"maxResponse": 32000,
"quoteMaxToken": 128000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "kimi-latest-8k",
"name": "kimi-latest-8k",
"maxContext": 8000,
"maxResponse": 4000,
"quoteMaxToken": 6000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "kimi-latest-32k",
"name": "kimi-latest-32k",
"maxContext": 32000,
"maxResponse": 16000,
"quoteMaxToken": 32000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "kimi-latest-128k",
"name": "kimi-latest-128k",
"maxContext": 128000,
"maxResponse": 32000,
"quoteMaxToken": 128000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "moonshot-v1-8k",
"name": "moonshot-v1-8k",
"maxContext": 8000,
"maxResponse": 4000,
"quoteMaxToken": 6000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "moonshot-v1-32k",
"name": "moonshot-v1-32k",
"maxContext": 32000,
"maxResponse": 4000,
"quoteMaxToken": 32000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "moonshot-v1-128k",
"name": "moonshot-v1-128k",
"maxContext": 128000,
"maxResponse": 4000,
"quoteMaxToken": 60000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "moonshot-v1-8k-vision-preview",
"name": "moonshot-v1-8k-vision-preview",
"maxContext": 8000,
"maxResponse": 4000,
"quoteMaxToken": 6000,
"maxTemperature": 1,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "moonshot-v1-32k-vision-preview",
"name": "moonshot-v1-32k-vision-preview",
"maxContext": 32000,
"maxResponse": 4000,
"quoteMaxToken": 32000,
"maxTemperature": 1,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "moonshot-v1-128k-vision-preview",
"name": "moonshot-v1-128k-vision-preview",
"maxContext": 128000,
"maxResponse": 4000,
"quoteMaxToken": 60000,
"maxTemperature": 1,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
}
]
}
@@ -1,4 +0,0 @@
{
"provider": "Ollama",
"list": []
}
@@ -1,301 +0,0 @@
{
"provider": "OpenAI",
"list": [
{
"model": "gpt-4.1",
"name": "gpt-4.1",
"maxContext": 1000000,
"maxResponse": 32000,
"quoteMaxToken": 1000000,
"maxTemperature": 1.2,
"showTopP": true,
"responseFormatList": ["text", "json_object", "json_schema"],
"showStopSign": true,
"vision": true,
"toolChoice": true,
"functionCall": true,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "gpt-4.1-mini",
"name": "gpt-4.1-mini",
"maxContext": 1000000,
"maxResponse": 32000,
"quoteMaxToken": 1000000,
"maxTemperature": 1.2,
"showTopP": true,
"responseFormatList": ["text", "json_object", "json_schema"],
"showStopSign": true,
"vision": true,
"toolChoice": true,
"functionCall": true,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "gpt-4.1-nano",
"name": "gpt-4.1-nano",
"maxContext": 1000000,
"maxResponse": 32000,
"quoteMaxToken": 1000000,
"maxTemperature": 1.2,
"showTopP": true,
"responseFormatList": ["text", "json_object", "json_schema"],
"showStopSign": true,
"vision": true,
"toolChoice": true,
"functionCall": true,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "gpt-4o-mini",
"name": "GPT-4o-mini",
"maxContext": 128000,
"maxResponse": 16000,
"quoteMaxToken": 60000,
"maxTemperature": 1.2,
"showTopP": true,
"responseFormatList": ["text", "json_object", "json_schema"],
"showStopSign": true,
"vision": true,
"toolChoice": true,
"functionCall": true,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "gpt-4o",
"name": "GPT-4o",
"maxContext": 128000,
"maxResponse": 4000,
"quoteMaxToken": 60000,
"maxTemperature": 1.2,
"showTopP": true,
"responseFormatList": ["text", "json_object", "json_schema"],
"showStopSign": true,
"vision": true,
"toolChoice": true,
"functionCall": true,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm"
},
{
"model": "o4-mini",
"name": "o4-mini",
"maxContext": 200000,
"maxResponse": 100000,
"quoteMaxToken": 120000,
"maxTemperature": null,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {
"max_tokens": "max_completion_tokens"
},
"type": "llm",
"showTopP": true,
"showStopSign": false
},
{
"model": "o3",
"name": "o3",
"maxContext": 200000,
"maxResponse": 100000,
"quoteMaxToken": 120000,
"maxTemperature": null,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {
"max_tokens": "max_completion_tokens"
},
"type": "llm",
"showTopP": true,
"showStopSign": false
},
{
"model": "o3-mini",
"name": "o3-mini",
"maxContext": 200000,
"maxResponse": 100000,
"quoteMaxToken": 120000,
"maxTemperature": null,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {
"max_tokens": "max_completion_tokens"
},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "o1",
"name": "o1",
"maxContext": 195000,
"maxResponse": 8000,
"quoteMaxToken": 120000,
"maxTemperature": null,
"vision": true,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {
"max_tokens": "max_completion_tokens"
},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "o1-mini",
"name": "o1-mini",
"maxContext": 128000,
"maxResponse": 4000,
"quoteMaxToken": 120000,
"maxTemperature": null,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {
"max_tokens": "max_completion_tokens"
},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "o1-preview",
"name": "o1-preview",
"maxContext": 128000,
"maxResponse": 4000,
"quoteMaxToken": 120000,
"maxTemperature": null,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {
"stream": false
},
"fieldMap": {
"max_tokens": "max_completion_tokens"
},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "gpt-3.5-turbo",
"name": "gpt-3.5-turbo",
"maxContext": 16000,
"maxResponse": 4000,
"quoteMaxToken": 13000,
"maxTemperature": 1.2,
"showTopP": true,
"showStopSign": true,
"vision": false,
"toolChoice": true,
"functionCall": true,
"defaultSystemChatPrompt": "",
"type": "llm"
},
{
"model": "gpt-4-turbo",
"name": "gpt-4-turbo",
"maxContext": 128000,
"maxResponse": 4000,
"quoteMaxToken": 60000,
"maxTemperature": 1.2,
"showTopP": true,
"showStopSign": true,
"vision": true,
"toolChoice": true,
"functionCall": true,
"defaultSystemChatPrompt": "",
"type": "llm"
},
{
"model": "text-embedding-3-large",
"name": "text-embedding-3-large",
"defaultToken": 512,
"maxToken": 8000,
"defaultConfig": {
"dimensions": 1024
},
"type": "embedding"
},
{
"model": "text-embedding-3-small",
"name": "text-embedding-3-small",
"defaultToken": 512,
"maxToken": 8000,
"type": "embedding"
},
{
"model": "text-embedding-ada-002",
"name": "text-embedding-ada-002",
"defaultToken": 512,
"maxToken": 8000,
"type": "embedding"
},
{
"model": "tts-1",
"name": "TTS1",
"voices": [
{
"label": "Alloy",
"value": "alloy"
},
{
"label": "Echo",
"value": "echo"
},
{
"label": "Fable",
"value": "fable"
},
{
"label": "Onyx",
"value": "onyx"
},
{
"label": "Nova",
"value": "nova"
},
{
"label": "Shimmer",
"value": "shimmer"
}
],
"type": "tts"
},
{
"model": "whisper-1",
"name": "whisper-1",
"type": "stt"
}
]
}
@@ -1,4 +0,0 @@
{
"provider": "Other",
"list": []
}
@@ -1,4 +0,0 @@
{
"provider": "PPIO",
"list": []
}
@@ -1,440 +0,0 @@
{
"provider": "Qwen",
"list": [
{
"model": "qwen-max",
"name": "Qwen-max",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 120000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwen-vl-max",
"name": "qwen-vl-max",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 120000,
"maxTemperature": 1,
"vision": true,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "qwen-plus",
"name": "Qwen-plus",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 120000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwen-vl-plus",
"name": "qwen-vl-plus",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 120000,
"maxTemperature": 1,
"vision": true,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "qwen-turbo",
"name": "Qwen-turbo",
"maxContext": 1000000,
"maxResponse": 8000,
"quoteMaxToken": 1000000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwen3-235b-a22b",
"name": "qwen3-235b-a22b",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"vision": false,
"reasoning": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {
"stream": true
},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwen3-32b",
"name": "qwen3-32b",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"vision": false,
"reasoning": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {
"stream": true
},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwen3-30b-a3b",
"name": "qwen3-30b-a3b",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"vision": false,
"reasoning": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {
"stream": true
},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwen3-14b",
"name": "qwen3-14b",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"vision": false,
"reasoning": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {
"stream": true
},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwen3-8b",
"name": "qwen3-8b",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"vision": false,
"reasoning": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {
"stream": true
},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwen3-4b",
"name": "qwen3-4b",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"vision": false,
"reasoning": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {
"stream": true
},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwen3-1.7b",
"name": "qwen3-1.7b",
"maxContext": 32000,
"maxResponse": 8000,
"quoteMaxToken": 30000,
"maxTemperature": 1,
"vision": false,
"reasoning": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {
"stream": true
},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwen3-0.6b",
"name": "qwen3-0.6b",
"maxContext": 32000,
"maxResponse": 8000,
"quoteMaxToken": 30000,
"maxTemperature": 1,
"vision": false,
"reasoning": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {
"stream": true
},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwq-plus",
"name": "qwq-plus",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": null,
"vision": false,
"reasoning": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": false,
"usedInClassify": false,
"usedInExtractFields": false,
"usedInQueryExtension": false,
"defaultConfig": {
"stream": true
},
"fieldMap": {},
"type": "llm",
"showTopP": false,
"showStopSign": false
},
{
"model": "qwq-32b",
"name": "qwq-32b",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": null,
"vision": false,
"reasoning": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": false,
"usedInClassify": false,
"usedInExtractFields": false,
"usedInQueryExtension": false,
"defaultConfig": {
"stream": true
},
"fieldMap": {},
"type": "llm",
"showTopP": false,
"showStopSign": false
},
{
"model": "qwen-coder-turbo",
"name": "qwen-coder-turbo",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 50000,
"maxTemperature": 1,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "qwen2.5-7b-instruct",
"name": "qwen2.5-7b-instruct",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 50000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwen2.5-14b-instruct",
"name": "qwen2.5-14b-instruct",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 50000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwen2.5-32b-instruct",
"name": "qwen2.5-32b-instruct",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 50000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwen2.5-72b-instruct",
"name": "Qwen2.5-72B-instruct",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 50000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwen-long",
"name": "qwen-long",
"maxContext": 10000000,
"maxResponse": 6000,
"quoteMaxToken": 10000000,
"maxTemperature": 1,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": false,
"usedInClassify": false,
"usedInExtractFields": false,
"usedInQueryExtension": false,
"usedInToolCall": false,
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": false,
"showStopSign": false
},
{
"model": "text-embedding-v4",
"name": "text-embedding-v4",
"defaultToken": 512,
"maxToken": 8000,
"type": "embedding",
"defaultConfig": {
"dimensions": 1536
}
},
{
"model": "text-embedding-v3",
"name": "text-embedding-v3",
"defaultToken": 512,
"maxToken": 8000,
"type": "embedding"
},
{
"model": "gte-rerank-v2",
"name": "gte-rerank-v2",
"type": "rerank"
}
]
}
@@ -1,194 +0,0 @@
{
"provider": "Siliconflow",
"list": [
{
"model": "Qwen/Qwen2.5-72B-Instruct",
"name": "Qwen/Qwen2.5-72B-Instruct",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 50000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "Qwen/Qwen2-VL-72B-Instruct",
"name": "Qwen/Qwen2-VL-72B-Instruct",
"maxContext": 32000,
"maxResponse": 4000,
"quoteMaxToken": 32000,
"maxTemperature": 1,
"censor": false,
"vision": true,
"datasetProcess": false,
"usedInClassify": false,
"usedInExtractFields": false,
"usedInToolCall": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "deepseek-ai/DeepSeek-V2.5",
"name": "deepseek-ai/DeepSeek-V2.5",
"maxContext": 32000,
"maxResponse": 4000,
"quoteMaxToken": 32000,
"maxTemperature": 1,
"vision": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "BAAI/bge-m3",
"name": "BAAI/bge-m3",
"defaultToken": 512,
"maxToken": 8000,
"type": "embedding"
},
{
"model": "FunAudioLLM/CosyVoice2-0.5B",
"name": "FunAudioLLM/CosyVoice2-0.5B",
"voices": [
{
"label": "alex",
"value": "FunAudioLLM/CosyVoice2-0.5B:alex"
},
{
"label": "anna",
"value": "FunAudioLLM/CosyVoice2-0.5B:anna"
},
{
"label": "bella",
"value": "FunAudioLLM/CosyVoice2-0.5B:bella"
},
{
"label": "benjamin",
"value": "FunAudioLLM/CosyVoice2-0.5B:benjamin"
},
{
"label": "charles",
"value": "FunAudioLLM/CosyVoice2-0.5B:charles"
},
{
"label": "claire",
"value": "FunAudioLLM/CosyVoice2-0.5B:claire"
},
{
"label": "david",
"value": "FunAudioLLM/CosyVoice2-0.5B:david"
},
{
"label": "diana",
"value": "FunAudioLLM/CosyVoice2-0.5B:diana"
}
],
"type": "tts"
},
{
"model": "RVC-Boss/GPT-SoVITS",
"name": "RVC-Boss/GPT-SoVITS",
"voices": [
{
"label": "alex",
"value": "RVC-Boss/GPT-SoVITS:alex"
},
{
"label": "anna",
"value": "RVC-Boss/GPT-SoVITS:anna"
},
{
"label": "bella",
"value": "RVC-Boss/GPT-SoVITS:bella"
},
{
"label": "benjamin",
"value": "RVC-Boss/GPT-SoVITS:benjamin"
},
{
"label": "charles",
"value": "RVC-Boss/GPT-SoVITS:charles"
},
{
"label": "claire",
"value": "RVC-Boss/GPT-SoVITS:claire"
},
{
"label": "david",
"value": "RVC-Boss/GPT-SoVITS:david"
},
{
"label": "diana",
"value": "RVC-Boss/GPT-SoVITS:diana"
}
],
"type": "tts"
},
{
"model": "fishaudio/fish-speech-1.5",
"name": "fish-speech-1.5",
"voices": [
{
"label": "alex",
"value": "fishaudio/fish-speech-1.5:alex"
},
{
"label": "anna",
"value": "fishaudio/fish-speech-1.5:anna"
},
{
"label": "bella",
"value": "fishaudio/fish-speech-1.5:bella"
},
{
"label": "benjamin",
"value": "fishaudio/fish-speech-1.5:benjamin"
},
{
"label": "charles",
"value": "fishaudio/fish-speech-1.5:charles"
},
{
"label": "claire",
"value": "fishaudio/fish-speech-1.5:claire"
},
{
"label": "david",
"value": "fishaudio/fish-speech-1.5:david"
},
{
"label": "diana",
"value": "fishaudio/fish-speech-1.5:diana"
}
],
"type": "tts"
},
{
"model": "FunAudioLLM/SenseVoiceSmall",
"name": "FunAudioLLM/SenseVoiceSmall",
"type": "stt"
},
{
"model": "BAAI/bge-reranker-v2-m3",
"name": "BAAI/bge-reranker-v2-m3",
"type": "rerank"
}
]
}
@@ -1,99 +0,0 @@
{
"provider": "SparkDesk",
"list": [
{
"model": "lite",
"name": "SparkDesk-lite",
"maxContext": 32000,
"maxResponse": 4000,
"quoteMaxToken": 32000,
"maxTemperature": 1,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "generalv3",
"name": "SparkDesk-Pro",
"maxContext": 8000,
"maxResponse": 8000,
"quoteMaxToken": 8000,
"maxTemperature": 1,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "pro-128k",
"name": "SparkDesk-Pro-128k",
"maxContext": 128000,
"maxResponse": 4000,
"quoteMaxToken": 128000,
"maxTemperature": 1,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "generalv3.5",
"name": "SparkDesk-max",
"maxContext": 8000,
"maxResponse": 8000,
"quoteMaxToken": 8000,
"maxTemperature": 1,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "max-32k",
"name": "SparkDesk-max-32k",
"maxContext": 32000,
"maxResponse": 8000,
"quoteMaxToken": 32000,
"maxTemperature": 1,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "4.0Ultra",
"name": "SparkDesk-v4.0 Ultra",
"maxContext": 8000,
"maxResponse": 8000,
"quoteMaxToken": 8000,
"maxTemperature": 1,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
}
]
}
@@ -1,253 +0,0 @@
{
"provider": "StepFun",
"list": [
{
"model": "step-1-flash",
"name": "step-1-flash",
"maxContext": 8000,
"maxResponse": 4000,
"quoteMaxToken": 6000,
"maxTemperature": 2,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "step-1-8k",
"name": "step-1-8k",
"maxContext": 8000,
"maxResponse": 8000,
"quoteMaxToken": 8000,
"maxTemperature": 2,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "step-1-32k",
"name": "step-1-32k",
"maxContext": 32000,
"maxResponse": 8000,
"quoteMaxToken": 32000,
"maxTemperature": 2,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "step-1-128k",
"name": "step-1-128k",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 128000,
"maxTemperature": 2,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "step-1-256k",
"name": "step-1-256k",
"maxContext": 256000,
"maxResponse": 8000,
"quoteMaxToken": 256000,
"maxTemperature": 2,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "step-1o-vision-32k",
"name": "step-1o-vision-32k",
"maxContext": 32000,
"quoteMaxToken": 32000,
"maxResponse": 8000,
"maxTemperature": 2,
"vision": true,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "step-1v-8k",
"name": "step-1v-8k",
"maxContext": 8000,
"maxResponse": 8000,
"quoteMaxToken": 8000,
"maxTemperature": 2,
"vision": true,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "step-1v-32k",
"name": "step-1v-32k",
"maxContext": 32000,
"quoteMaxToken": 32000,
"maxResponse": 8000,
"maxTemperature": 2,
"vision": true,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "step-2-mini",
"name": "step-2-mini",
"maxContext": 8000,
"maxResponse": 4000,
"quoteMaxToken": 6000,
"maxTemperature": 2,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "step-2-16k",
"name": "step-2-16k",
"maxContext": 16000,
"maxResponse": 4000,
"quoteMaxToken": 4000,
"maxTemperature": 2,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "step-2-16k-exp",
"name": "step-2-16k-exp",
"maxContext": 16000,
"maxResponse": 4000,
"quoteMaxToken": 4000,
"maxTemperature": 2,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "step-tts-mini",
"name": "step-tts-mini",
"voices": [
{
"label": "cixingnansheng",
"value": "cixingnansheng"
},
{
"label": "zhengpaiqingnian",
"value": "zhengpaiqingnian"
},
{
"label": "yuanqinansheng",
"value": "yuanqinansheng"
},
{
"label": "qingniandaxuesheng",
"value": "qingniandaxuesheng"
},
{
"label": "boyinnansheng",
"value": "boyinnansheng"
},
{
"label": "ruyananshi",
"value": "ruyananshi"
},
{
"label": "shenchennanyin",
"value": "shenchennanyin"
},
{
"label": "qinqienvsheng",
"value": "qinqienvsheng"
},
{
"label": "wenrounvsheng",
"value": "wenrounvsheng"
},
{
"label": "jilingshaonv",
"value": "jilingshaonv"
},
{
"label": "yuanqishaonv",
"value": "yuanqishaonv"
},
{
"label": "ruanmengnvsheng",
"value": "ruanmengnvsheng"
},
{
"label": "youyanvsheng",
"value": "youyanvsheng"
},
{
"label": "lengyanyujie",
"value": "lengyanyujie"
},
{
"label": "shuangkuaijiejie",
"value": "shuangkuaijiejie"
},
{
"label": "wenjingxuejie",
"value": "wenjingxuejie"
},
{
"label": "linjiajiejie",
"value": "linjiajiejie"
},
{
"label": "linjiameimei",
"value": "linjiameimei"
},
{
"label": "zhixingjiejie",
"value": "zhixingjiejie"
}
],
"type": "tts"
}
]
}
@@ -1,39 +0,0 @@
{
"provider": "Yi",
"list": [
{
"model": "yi-lightning",
"name": "yi-lightning",
"maxContext": 16000,
"maxResponse": 4000,
"quoteMaxToken": 12000,
"maxTemperature": 1,
"vision": false,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
},
{
"model": "yi-vision-v2",
"name": "yi-vision-v2",
"maxContext": 16000,
"maxResponse": 4000,
"quoteMaxToken": 12000,
"maxTemperature": 1,
"vision": true,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
}
]
}
@@ -1,22 +0,0 @@
{
"provider": "Jina",
"list": [
{
"type": "embedding",
"model": "jina-embeddings-v3",
"name": "jina-embeddings-v3",
"defaultToken": 512,
"maxToken": 8000
},
{
"model": "jina-reranker-v2-base-multilingual",
"name": "jina-reranker-v2-base-multilingual",
"type": "rerank"
},
{
"model": "jina-reranker-m0",
"name": "jina-reranker-m0",
"type": "rerank"
}
]
}
+54 -57
View File
@@ -1,5 +1,3 @@
import path from 'path';
import * as fs from 'fs';
import { type SystemModelItemType } from '../type';
import { ModelTypeEnum } from '@fastgpt/global/core/ai/model';
import { MongoSystemModel } from './schema';
@@ -11,34 +9,16 @@ import {
type RerankModelItemType
} from '@fastgpt/global/core/ai/model.d';
import { debounce } from 'lodash';
import {
getModelProvider,
type ModelProviderIdType,
type ModelProviderType
} from '@fastgpt/global/core/ai/provider';
import { getModelProvider } from '@fastgpt/global/core/ai/provider';
import { findModelFromAlldata } from '../model';
import {
reloadFastGPTConfigBuffer,
updateFastGPTConfigBuffer
} from '../../../common/system/config/controller';
import { delay } from '@fastgpt/global/common/system/utils';
import { pluginClient } from '../../../thirdProvider/fastgptPlugin';
import { setCron } from '../../../common/system/cron';
const getModelConfigBaseUrl = () => {
const currentFileUrl = new URL(import.meta.url);
const filePath = decodeURIComponent(
process.platform === 'win32'
? currentFileUrl.pathname.substring(1) // Remove leading slash on Windows
: currentFileUrl.pathname
);
const modelsPath = path.join(path.dirname(filePath), 'provider');
return modelsPath;
};
/*
TODO: 分优先级读取:
1. 有外部挂载目录,则读取外部的
2. 没有外部挂载目录,则读取本地的。然后试图拉取云端的进行覆盖。
*/
export const loadSystemModels = async (init = false) => {
const pushModel = (model: SystemModelItemType) => {
global.systemModelList.push(model);
@@ -108,17 +88,19 @@ export const loadSystemModels = async (init = false) => {
global.systemDefaultModel = {};
try {
const dbModels = await MongoSystemModel.find({}).lean();
// Get model from db and plugin
const [dbModels, systemModels] = await Promise.all([
MongoSystemModel.find({}).lean(),
pluginClient.model.list().then((res) => {
if (res.status === 200) return res.body;
console.error('Get fastGPT plugin model error');
return [];
})
]);
// Load system model from local
const modelsPath = getModelConfigBaseUrl();
const providerList = await fs.promises.readdir(modelsPath);
await Promise.all(
providerList.map(async (name) => {
const fileContent = (await import(`./provider/${name}`))?.default as {
provider: ModelProviderIdType;
list: SystemModelItemType[];
};
systemModels.map(async (model) => {
const mergeObject = (obj1: any, obj2: any) => {
if (!obj1 && !obj2) return undefined;
const formatObj1 = typeof obj1 === 'object' ? obj1 : {};
@@ -126,27 +108,24 @@ export const loadSystemModels = async (init = false) => {
return { ...formatObj1, ...formatObj2 };
};
fileContent.list.forEach((fileModel) => {
const dbModel = dbModels.find((item) => item.model === fileModel.model);
const dbModel = dbModels.find((item) => item.model === model.model);
const modelData: any = {
...fileModel,
...dbModel?.metadata,
// @ts-ignore
defaultConfig: mergeObject(fileModel.defaultConfig, dbModel?.metadata?.defaultConfig),
// @ts-ignore
fieldMap: mergeObject(fileModel.fieldMap, dbModel?.metadata?.fieldMap),
provider: getModelProvider(dbModel?.metadata?.provider || fileContent.provider).id,
type: dbModel?.metadata?.type || fileModel.type,
isCustom: false
};
pushModel(modelData);
});
const modelData: any = {
...model,
...dbModel?.metadata,
// @ts-ignore
defaultConfig: mergeObject(model.defaultConfig, dbModel?.metadata?.defaultConfig),
// @ts-ignore
fieldMap: mergeObject(model.fieldMap, dbModel?.metadata?.fieldMap),
provider: getModelProvider(dbModel?.metadata?.provider || (model.provider as any)).id,
type: dbModel?.metadata?.type || model.type,
isCustom: false
};
pushModel(modelData);
})
);
// Custom model
// Custom model(Not in system config)
dbModels.forEach((dbModel) => {
if (global.systemModelList.find((item) => item.model === dbModel.model)) return;
@@ -190,7 +169,18 @@ export const loadSystemModels = async (init = false) => {
return providerA.order - providerB.order;
});
console.log('Load models success', JSON.stringify(global.systemActiveModelList, null, 2));
console.log(
`Load models success, total: ${global.systemModelList.length}, active: ${global.systemActiveModelList.length}`,
JSON.stringify(
global.systemActiveModelList.map((item) => ({
provider: item.provider,
model: item.model,
name: item.name
})),
null,
2
)
);
} catch (error) {
console.error('Load models error', error);
// @ts-ignore
@@ -205,17 +195,16 @@ export const getSystemModelConfig = async (model: string): Promise<SystemModelIt
if (modelData.isCustom) return Promise.reject('Custom model not data');
// Read file
const fileContent = (await import(`./provider/${modelData.provider}`))?.default as {
provider: ModelProviderType;
list: SystemModelItemType[];
};
const modelDefaulConfig = await pluginClient.model.list().then((res) => {
if (res.status === 200) {
return res.body.find((item) => item.model === model) as SystemModelItemType;
}
const config = fileContent.list.find((item) => item.model === model);
if (!config) return Promise.reject('Model config is not found');
return Promise.reject('Can not get model config from plugin');
});
return {
...config,
...modelDefaulConfig,
provider: modelData.provider,
isCustom: false
};
@@ -246,3 +235,11 @@ export const updatedReloadSystemModel = async () => {
// 3. 延迟1秒,等待其他节点刷新
await delay(1000);
};
export const cronRefreshModels = async () => {
setCron('*/5 * * * *', async () => {
// 1. 更新模型(所有节点都会触发)
await loadSystemModels(true);
// 2. 更新缓存(仅主节点触发)
await updateFastGPTConfigBuffer();
});
};
+14 -2
View File
@@ -1,6 +1,7 @@
import { type AppSchema } from '@fastgpt/global/core/app/type';
import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
import { MongoApp } from './schema';
import type { StoreNodeItemType } from '@fastgpt/global/core/workflow/type/node';
import { encryptSecretValue, storeSecretValue } from '../../common/secret/utils';
@@ -19,6 +20,7 @@ import { MongoResourcePermission } from '../../support/permission/schema';
import { PerResourceTypeEnum } from '@fastgpt/global/support/permission/constant';
import { removeImageByPath } from '../../common/file/image/controller';
import { mongoSessionRun } from '../../common/mongo/sessionRun';
import { MongoAppLogKeys } from './logs/logkeysSchema';
export const beforeUpdateAppFormat = ({ nodes }: { nodes?: StoreNodeItemType[] }) => {
if (!nodes) return;
@@ -140,6 +142,10 @@ export const onDelOneApp = async ({
fields: '_id avatar'
});
const deletedAppIds = apps
.filter((app) => app.type !== AppTypeEnum.folder)
.map((app) => String(app._id));
// Remove eval job
const evalJobs = await MongoEvaluation.find(
{
@@ -191,6 +197,10 @@ export const onDelOneApp = async ({
resourceId: appId
}).session(session);
await MongoAppLogKeys.deleteMany({
appId
}).session(session);
// delete app
await MongoApp.deleteOne(
{
@@ -204,8 +214,10 @@ export const onDelOneApp = async ({
};
if (session) {
return del(session);
await del(session);
return deletedAppIds;
}
return mongoSessionRun(del);
await mongoSessionRun(del);
return deletedAppIds;
};
@@ -0,0 +1,32 @@
import type { AppLogKeysSchemaType } from '@fastgpt/global/core/app/logs/type';
import { connectionMongo, getMongoModel } from '../../../common/mongo';
import { AppCollectionName } from '../schema';
import { TeamCollectionName } from '@fastgpt/global/support/user/team/constant';
const { Schema } = connectionMongo;
export const AppLogKeysCollectionEnum = 'app_log_keys';
const AppLogKeysSchema = new Schema({
teamId: {
type: Schema.Types.ObjectId,
ref: TeamCollectionName,
required: true
},
appId: {
type: Schema.Types.ObjectId,
ref: AppCollectionName,
required: true
},
logKeys: {
type: Array,
required: true
}
});
AppLogKeysSchema.index({ teamId: 1, appId: 1 });
export const MongoAppLogKeys = getMongoModel<AppLogKeysSchemaType>(
AppLogKeysCollectionEnum,
AppLogKeysSchema
);
+36
View File
@@ -1,9 +1,13 @@
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
import { SSEClientTransport } from '@modelcontextprotocol/sdk/client/sse.js';
import { StreamableHTTPClientTransport } from '@modelcontextprotocol/sdk/client/streamableHttp.js';
import type { AppSchema } from '@fastgpt/global/core/app/type';
import { type McpToolConfigType } from '@fastgpt/global/core/app/type';
import { addLog } from '../../common/system/log';
import { retryFn } from '@fastgpt/global/common/system/utils';
import { PluginSourceEnum } from '@fastgpt/global/core/app/plugin/constants';
import { MongoApp } from './schema';
import type { McpToolDataType } from '@fastgpt/global/core/app/mcpTools/type';
export class MCPClient {
private client: Client;
@@ -128,3 +132,35 @@ export class MCPClient {
}
}
}
export const getMCPChildren = async (app: AppSchema) => {
const isNewMcp = !!app.modules[0].toolConfig?.mcpToolSet;
const id = String(app._id);
if (isNewMcp) {
return (
app.modules[0].toolConfig?.mcpToolSet?.toolList.map((item) => ({
...item,
id: `${PluginSourceEnum.mcp}-${id}/${item.name}`,
avatar: app.avatar
})) ?? []
);
} else {
// Old mcp toolset
const children = await MongoApp.find({
teamId: app.teamId,
parentId: id
}).lean();
return children.map((item) => {
const node = item.modules[0];
const toolData: McpToolDataType = node.inputs[0].value;
return {
avatar: app.avatar,
id: `${PluginSourceEnum.mcp}-${id}/${item.name}`,
...toolData
};
});
}
};
+244 -130
View File
@@ -1,6 +1,10 @@
import { type FlowNodeTemplateType } from '@fastgpt/global/core/workflow/type/node.d';
import type {
NodeToolConfigType,
FlowNodeTemplateType
} from '@fastgpt/global/core/workflow/type/node.d';
import {
FlowNodeOutputTypeEnum,
FlowNodeInputTypeEnum,
FlowNodeTypeEnum
} from '@fastgpt/global/core/workflow/node/constant';
import {
@@ -28,7 +32,7 @@ import {
NodeInputKeyEnum
} from '@fastgpt/global/core/workflow/constants';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { getSystemToolList } from '../tool/api';
import { APIGetSystemToolList } from '../tool/api';
import { Types } from '../../../common/mongo';
import type { SystemPluginConfigSchemaType } from './type';
import type {
@@ -37,37 +41,11 @@ import type {
} from '@fastgpt/global/core/workflow/type/io';
import { isProduction } from '@fastgpt/global/common/system/constants';
import { Output_Template_Error_Message } from '@fastgpt/global/core/workflow/template/output';
/**
plugin id rule:
- personal: ObjectId
- commercial: commercial-ObjectId
- systemtool: systemTool-id
(deprecated) community: community-id
*/
export function splitCombinePluginId(id: string) {
const splitRes = id.split('-');
if (splitRes.length === 1) {
// app id
return {
source: PluginSourceEnum.personal,
pluginId: id
};
}
const [source, pluginId] = id.split('-') as [PluginSourceEnum, string | undefined];
if (!source || !pluginId) throw new Error('pluginId not found');
// 兼容4.10.0 之前的插件
if (source === 'community' || id === 'commercial-dalle3') {
return {
source: PluginSourceEnum.systemTool,
pluginId: `${PluginSourceEnum.systemTool}-${pluginId}`
};
}
return { source, pluginId: id };
}
import type { RuntimeNodeItemType } from '@fastgpt/global/core/workflow/runtime/type';
import { splitCombinePluginId } from '@fastgpt/global/core/app/plugin/utils';
import { getMCPToolRuntimeNode } from '@fastgpt/global/core/app/mcpTools/utils';
import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
import { getMCPChildren } from '../mcp';
type ChildAppType = SystemPluginTemplateItemType & {
teamId?: string;
@@ -81,77 +59,102 @@ export const getSystemPluginByIdAndVersionId = async (
pluginId: string,
versionId?: string
): Promise<ChildAppType> => {
const plugin = await (async (): Promise<ChildAppType> => {
const plugin = await getSystemPluginById(pluginId);
const plugin = await getSystemToolById(pluginId);
// Admin selected system tool
if (plugin.associatedPluginId) {
// The verification plugin is set as a system plugin
const systemPlugin = await MongoSystemPlugin.findOne(
{ pluginId: plugin.id, 'customConfig.associatedPluginId': plugin.associatedPluginId },
'associatedPluginId'
).lean();
if (!systemPlugin) return Promise.reject(PluginErrEnum.unExist);
// Admin selected system tool
if (plugin.associatedPluginId) {
// The verification plugin is set as a system plugin
const systemPlugin = await MongoSystemPlugin.findOne(
{ pluginId: plugin.id, 'customConfig.associatedPluginId': plugin.associatedPluginId },
'associatedPluginId'
).lean();
if (!systemPlugin) return Promise.reject(PluginErrEnum.unExist);
const app = await MongoApp.findById(plugin.associatedPluginId).lean();
if (!app) return Promise.reject(PluginErrEnum.unExist);
const version = versionId
? await getAppVersionById({
appId: plugin.associatedPluginId,
versionId,
app
})
: await getAppLatestVersion(plugin.associatedPluginId, app);
if (!version.versionId) return Promise.reject('App version not found');
const isLatest = version.versionId
? await checkIsLatestVersion({
appId: plugin.associatedPluginId,
versionId: version.versionId
})
: true;
return {
...plugin,
workflow: {
nodes: version.nodes,
edges: version.edges,
chatConfig: version.chatConfig
},
version: versionId ? version?.versionId : '',
versionLabel: version?.versionName,
isLatestVersion: isLatest,
teamId: String(app.teamId),
tmbId: String(app.tmbId)
};
}
// System tool
const versionList = (plugin.versionList as SystemPluginTemplateItemType['versionList']) || [];
if (versionList.length === 0) {
return Promise.reject('Can not find plugin version list');
}
const app = await MongoApp.findById(plugin.associatedPluginId).lean();
if (!app) return Promise.reject(PluginErrEnum.unExist);
const version = versionId
? versionList.find((item) => item.value === versionId) ?? versionList[0]
: versionList[0];
const lastVersion = versionList[0];
? await getAppVersionById({
appId: plugin.associatedPluginId,
versionId,
app
})
: await getAppLatestVersion(plugin.associatedPluginId, app);
if (!version.versionId) return Promise.reject('App version not found');
const isLatest = version.versionId
? await checkIsLatestVersion({
appId: plugin.associatedPluginId,
versionId: version.versionId
})
: true;
return {
...plugin,
inputs: version.inputs,
outputs: version.outputs,
version: versionId ? version?.value : '',
versionLabel: versionId ? version?.value : '',
isLatestVersion: !version || !lastVersion || version.value === lastVersion?.value
workflow: {
nodes: version.nodes,
edges: version.edges,
chatConfig: version.chatConfig
},
version: versionId ? version?.versionId : '',
versionLabel: version?.versionName,
isLatestVersion: isLatest,
teamId: String(app.teamId),
tmbId: String(app.tmbId)
};
})();
}
return plugin;
// System toolset
if (plugin.isFolder) {
return {
...plugin,
inputs: [],
outputs: [],
inputList: plugin.inputList,
version: '',
isLatestVersion: true
};
}
// System tool
const versionList = (plugin.versionList as SystemPluginTemplateItemType['versionList']) || [];
if (versionList.length === 0) {
return Promise.reject('Can not find plugin version list');
}
const version = versionId
? versionList.find((item) => item.value === versionId) ?? versionList[0]
: versionList[0];
const lastVersion = versionList[0];
// concat parent (if exists) input config
const parent = plugin.parentId ? await getSystemToolById(plugin.parentId) : undefined;
if (parent && parent.inputList) {
plugin?.inputs?.push({
key: 'system_input_config',
label: '',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
inputList: parent.inputList
});
}
return {
...plugin,
inputs: version.inputs,
outputs: version.outputs,
version: versionId ? version?.value : '',
versionLabel: versionId ? version?.value : '',
isLatestVersion: !version || !lastVersion || version.value === lastVersion?.value
};
};
/* Format plugin to workflow preview node data */
/*
Format plugin to workflow preview node data
Persion workflow/plugin: objectId
Persion mcptoolset: objectId
Persion mcp tool: mcp-parentId/name
System tool/toolset: system-toolId
*/
export async function getChildAppPreviewNode({
appId,
versionId,
@@ -164,6 +167,8 @@ export async function getChildAppPreviewNode({
const { source, pluginId } = splitCombinePluginId(appId);
const app: ChildAppType = await (async () => {
// 1. App
// 2. MCP ToolSets
if (source === PluginSourceEnum.personal) {
const item = await MongoApp.findById(pluginId).lean();
if (!item) return Promise.reject(PluginErrEnum.unExist);
@@ -178,6 +183,17 @@ export async function getChildAppPreviewNode({
})
: true;
if (item.type === AppTypeEnum.toolSet) {
const children = await getMCPChildren(item);
version.nodes[0].toolConfig = {
mcpToolSet: {
toolId: pluginId,
toolList: children,
url: ''
}
};
}
return {
id: String(item._id),
teamId: String(item.teamId),
@@ -201,29 +217,105 @@ export async function getChildAppPreviewNode({
hasTokenFee: false,
pluginOrder: 0
};
} else {
}
// mcp tool
else if (source === PluginSourceEnum.mcp) {
const [parentId, toolName] = pluginId.split('/');
// 1. get parentApp
const item = await MongoApp.findById(parentId).lean();
if (!item) return Promise.reject(PluginErrEnum.unExist);
const version = await getAppVersionById({ appId: parentId, versionId, app: item });
const toolConfig = version.nodes[0].toolConfig?.mcpToolSet;
const tool = toolConfig?.toolList.find((item) => item.name === toolName);
if (!tool || !toolConfig) return Promise.reject(PluginErrEnum.unExist);
return {
avatar: item.avatar,
id: appId,
name: tool.name,
templateType: FlowNodeTemplateTypeEnum.tools,
workflow: {
nodes: [
getMCPToolRuntimeNode({
tool: {
description: tool.description,
inputSchema: tool.inputSchema,
name: tool.name
},
avatar: item.avatar,
parentId: item._id
})
],
edges: []
},
version: '',
isLatestVersion: true
};
}
// 1. System Tools
// 2. System Plugins configured in Pro (has associatedPluginId)
else {
return getSystemPluginByIdAndVersionId(pluginId, versionId);
}
})();
const { flowNodeType, nodeIOConfig } = await (async () => {
const { flowNodeType, nodeIOConfig } = await (async (): Promise<{
flowNodeType: FlowNodeTypeEnum;
nodeIOConfig: {
inputs: FlowNodeInputItemType[];
outputs: FlowNodeOutputItemType[];
toolConfig?: NodeToolConfigType;
showSourceHandle?: boolean;
showTargetHandle?: boolean;
};
}> => {
if (source === PluginSourceEnum.systemTool) {
// system Tool or Toolsets
const children = app.isFolder
? (await getSystemTools()).filter((item) => item.parentId === pluginId)
: [];
return {
flowNodeType: FlowNodeTypeEnum.tool,
flowNodeType: app.isFolder ? FlowNodeTypeEnum.toolSet : FlowNodeTypeEnum.tool,
nodeIOConfig: {
inputs: app.inputs || [],
outputs: app.outputs || [],
inputs: [
...(app.inputList
? [
{
key: NodeInputKeyEnum.systemInputConfig,
label: '',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
inputList: app.inputList
}
]
: []),
...(app.inputs ?? [])
],
outputs: app.outputs ?? [],
toolConfig: {
systemTool: {
toolId: app.id
}
}
...(app.isFolder
? {
systemToolSet: {
toolId: app.id,
toolList: children.map((item) => ({
toolId: item.id,
name: parseI18nString(item.name, lang),
description: parseI18nString(item.intro, lang)
}))
}
}
: { systemTool: { toolId: app.id } })
},
showSourceHandle: app.isFolder ? false : true,
showTargetHandle: app.isFolder ? false : true
}
};
}
// Plugin workflow
if (!!app.workflow.nodes.find((node) => node.flowNodeType === FlowNodeTypeEnum.pluginInput)) {
// plugin app
return {
flowNodeType: FlowNodeTypeEnum.pluginModule,
nodeIOConfig: pluginData2FlowNodeIO({ nodes: app.workflow.nodes })
@@ -235,6 +327,7 @@ export async function getChildAppPreviewNode({
!!app.workflow.nodes.find((node) => node.flowNodeType === FlowNodeTypeEnum.toolSet) &&
app.workflow.nodes.length === 1
) {
// mcp tools
return {
flowNodeType: FlowNodeTypeEnum.toolSet,
nodeIOConfig: toolSetData2FlowNodeIO({ nodes: app.workflow.nodes })
@@ -294,11 +387,15 @@ export async function getChildAppPreviewNode({
System plugin: plugin id
Personal plugin: Version id
*/
export async function getChildAppRuntimeById(
id: string,
versionId?: string,
lang: localeType = 'en'
): Promise<PluginRuntimeType> {
export async function getChildAppRuntimeById({
id,
versionId,
lang = 'en'
}: {
id: string;
versionId?: string;
lang?: localeType;
}): Promise<PluginRuntimeType> {
const app = await (async () => {
const { source, pluginId } = splitCombinePluginId(id);
@@ -351,6 +448,36 @@ export async function getChildAppRuntimeById(
};
}
export async function getSystemPluginRuntimeNodeById({
pluginId,
name,
intro
}: {
pluginId: string;
name: string;
intro: string;
}): Promise<RuntimeNodeItemType> {
const { source } = splitCombinePluginId(pluginId);
if (source === PluginSourceEnum.systemTool) {
const tool = await getSystemPluginByIdAndVersionId(pluginId);
return {
...tool,
name,
intro,
inputs: tool.inputs ?? [],
outputs: tool.outputs ?? [],
flowNodeType: FlowNodeTypeEnum.tool,
nodeId: getNanoid(),
toolConfig: {
systemTool: {
toolId: pluginId
}
}
};
}
return Promise.reject(PluginErrEnum.unExist);
}
const dbPluginFormat = (item: SystemPluginConfigSchemaType): SystemPluginTemplateItemType => {
const { name, avatar, intro, version, weight, templateType, associatedPluginId, userGuide } =
item.customConfig!;
@@ -405,11 +532,11 @@ export const refetchSystemPlugins = () => {
});
};
export const getSystemPlugins = async (): Promise<SystemPluginTemplateItemType[]> => {
export const getSystemTools = async (): Promise<SystemPluginTemplateItemType[]> => {
if (getCachedSystemPlugins().expires > Date.now() && isProduction) {
return getCachedSystemPlugins().data;
} else {
const tools = await getSystemToolList();
const tools = await APIGetSystemToolList();
// 从数据库里加载插件配置进行替换
const systemPluginsArray = await MongoSystemPlugin.find({}).lean();
@@ -436,34 +563,21 @@ export const getSystemPlugins = async (): Promise<SystemPluginTemplateItemType[]
const dbPluginConfig = systemPlugins.get(item.id);
const versionList = (item.versionList as SystemPluginTemplateItemType['versionList']) || [];
const inputs = versionList[0]?.inputs;
const inputs = versionList[0]?.inputs ?? [];
const outputs = versionList[0]?.outputs ?? [];
return {
isActive: item.isActive,
id: item.id,
parentId: item.parentId,
...item,
isFolder: tools.some((tool) => tool.parentId === item.id),
name: item.name,
avatar: item.avatar,
intro: item.intro,
author: item.author,
courseUrl: item.courseUrl,
showStatus: true,
weight: item.weight,
templateType: item.templateType,
originCost: item.originCost,
currentCost: item.currentCost,
hasTokenFee: item.hasTokenFee,
pluginOrder: item.pluginOrder,
workflow: {
nodes: [],
edges: []
},
versionList,
inputList: inputs?.find((input) => input.key === NodeInputKeyEnum.systemInputConfig)
?.inputList as any,
inputs,
outputs,
inputList: item?.secretInputConfig,
hasSystemSecret: !!dbPluginConfig?.inputListVal
};
});
@@ -484,10 +598,10 @@ export const getSystemPlugins = async (): Promise<SystemPluginTemplateItemType[]
}
};
export const getSystemPluginById = async (id: string): Promise<SystemPluginTemplateItemType> => {
export const getSystemToolById = async (id: string): Promise<SystemPluginTemplateItemType> => {
const { source, pluginId } = splitCombinePluginId(id);
if (source === PluginSourceEnum.systemTool) {
const tools = await getSystemPlugins();
const tools = await getSystemTools();
const tool = tools.find((item) => item.id === pluginId);
if (tool) {
return tool;
+2 -2
View File
@@ -1,9 +1,9 @@
import { type ChatNodeUsageType } from '@fastgpt/global/support/wallet/bill/type';
import { type PluginRuntimeType } from '@fastgpt/global/core/app/plugin/type';
import { splitCombinePluginId } from './controller';
import { PluginSourceEnum } from '@fastgpt/global/core/app/plugin/constants';
import { splitCombinePluginId } from '@fastgpt/global/core/app/plugin/utils';
/*
/*
Plugin points calculation:
1. 系统插件/商业版插件:
- 有错误:返回 0
+5 -12
View File
@@ -1,16 +1,9 @@
import createClient, { RunToolWithStream } from '@fastgpt-sdk/plugin';
import { RunToolWithStream } from '@fastgpt-sdk/plugin';
import { PluginSourceEnum } from '@fastgpt/global/core/app/plugin/constants';
import { pluginClient, BASE_URL, TOKEN } from '../../../thirdProvider/fastgptPlugin';
const BASE_URL = process.env.PLUGIN_BASE_URL || '';
const TOKEN = process.env.PLUGIN_TOKEN || '';
const client = createClient({
baseUrl: BASE_URL,
token: TOKEN
});
export async function getSystemToolList() {
const res = await client.tool.list();
export async function APIGetSystemToolList() {
const res = await pluginClient.tool.list();
if (res.status === 200) {
return res.body.map((item) => {
@@ -33,4 +26,4 @@ const runToolInstance = new RunToolWithStream({
baseUrl: BASE_URL,
token: TOKEN
});
export const runSystemTool = runToolInstance.run.bind(runToolInstance);
export const APIRunSystemTool = runToolInstance.run.bind(runToolInstance);
+11 -4
View File
@@ -3,11 +3,13 @@ import { getEmbeddingModel } from '../ai/model';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import type { StoreNodeItemType } from '@fastgpt/global/core/workflow/type/node';
import { getChildAppPreviewNode, splitCombinePluginId } from './plugin/controller';
import { getChildAppPreviewNode } from './plugin/controller';
import { PluginSourceEnum } from '@fastgpt/global/core/app/plugin/constants';
import { authAppByTmbId } from '../../support/permission/app/auth';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { getErrText } from '@fastgpt/global/common/error/utils';
import { splitCombinePluginId } from '@fastgpt/global/core/app/plugin/utils';
import type { localeType } from '@fastgpt/global/common/i18n/type';
export async function listAppDatasetDataByTeamIdAndDatasetIds({
teamId,
@@ -33,12 +35,14 @@ export async function rewriteAppWorkflowToDetail({
nodes,
teamId,
isRoot,
ownerTmbId
ownerTmbId,
lang
}: {
nodes: StoreNodeItemType[];
teamId: string;
isRoot: boolean;
ownerTmbId: string;
lang?: localeType;
}) {
const datasetIdSet = new Set<string>();
@@ -51,8 +55,9 @@ export async function rewriteAppWorkflowToDetail({
try {
const [preview] = await Promise.all([
getChildAppPreviewNode({
appId: pluginId,
versionId: node.version
appId: node.pluginId,
versionId: node.version,
lang
}),
...(source === PluginSourceEnum.personal
? [
@@ -80,6 +85,8 @@ export async function rewriteAppWorkflowToDetail({
node.hasTokenFee = preview.hasTokenFee;
node.hasSystemSecret = preview.hasSystemSecret;
node.toolConfig = preview.toolConfig;
// Latest version
if (!node.version) {
const inputsMap = new Map(node.inputs.map((item) => [item.key, item]));
@@ -361,7 +361,7 @@ const getMultiInput = async ({
};
};
/*
/*
Tool call auth add file prompt to question。
Guide the LLM to call tool.
*/
@@ -10,14 +10,16 @@ import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import { MCPClient } from '../../../app/mcp';
import { getSecretValue } from '../../../../common/secret/utils';
import type { McpToolDataType } from '@fastgpt/global/core/app/mcpTools/type';
import { runSystemTool } from '../../../app/tool/api';
import { APIRunSystemTool } from '../../../app/tool/api';
import { MongoSystemPlugin } from '../../../app/plugin/systemPluginSchema';
import { SystemToolInputTypeEnum } from '@fastgpt/global/core/app/systemTool/constants';
import type { StoreSecretValueType } from '@fastgpt/global/common/secret/type';
import { getSystemPluginById } from '../../../app/plugin/controller';
import { getSystemToolById } from '../../../app/plugin/controller';
import { textAdaptGptResponse } from '@fastgpt/global/core/workflow/runtime/utils';
import { pushTrack } from '../../../../common/middle/tracks/utils';
import { getNodeErrResponse } from '../utils';
import { splitCombinePluginId } from '@fastgpt/global/core/app/plugin/utils';
import { getAppVersionById } from '../../../../core/app/version/controller';
type SystemInputConfigType = {
type: SystemToolInputTypeEnum;
@@ -52,8 +54,8 @@ export const dispatchRunTool = async (props: RunToolProps): Promise<RunToolRespo
try {
// run system tool
if (systemToolId) {
const tool = await getSystemPluginById(systemToolId);
if (toolConfig?.systemTool?.toolId) {
const tool = await getSystemToolById(toolConfig.systemTool!.toolId);
const inputConfigParams = await (async () => {
switch (params.system_input_config?.type) {
@@ -82,7 +84,7 @@ export const dispatchRunTool = async (props: RunToolProps): Promise<RunToolRespo
const formatToolId = tool.id.split('-')[1];
const res = await runSystemTool({
const res = await APIRunSystemTool({
toolId: formatToolId,
inputs,
systemVar: {
@@ -112,6 +114,7 @@ export const dispatchRunTool = async (props: RunToolProps): Promise<RunToolRespo
}
}
});
let result = res.output || {};
if (res.error) {
@@ -175,8 +178,33 @@ export const dispatchRunTool = async (props: RunToolProps): Promise<RunToolRespo
}
]
};
} else if (toolConfig?.mcpTool?.toolId) {
const { pluginId } = splitCombinePluginId(toolConfig.mcpTool.toolId);
const [parentId, toolName] = pluginId.split('/');
const tool = await getAppVersionById({
appId: parentId,
versionId: version
});
const { headerSecret, url } =
tool.nodes[0].toolConfig?.mcpToolSet ?? tool.nodes[0].inputs[0].value;
const mcpClient = new MCPClient({
url,
headers: getSecretValue({
storeSecret: headerSecret
})
});
const result = await mcpClient.toolCall(toolName, params);
return {
[DispatchNodeResponseKeyEnum.nodeResponse]: {
toolRes: result,
moduleLogo: avatar
},
[DispatchNodeResponseKeyEnum.toolResponses]: result
};
} else {
// mcp tool
// mcp tool (old version compatible)
const { toolData, system_toolData, ...restParams } = params;
const { name: toolName, url, headerSecret } = toolData || system_toolData;
@@ -152,7 +152,7 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
} = data;
const startTime = Date.now();
rewriteRuntimeWorkFlow(runtimeNodes, runtimeEdges);
await rewriteRuntimeWorkFlow({ nodes: runtimeNodes, edges: runtimeEdges });
// 初始化深度和自动增加深度,避免无限嵌套
if (!props.workflowDispatchDeep) {
@@ -212,11 +212,10 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
sendStreamTimerSign();
}
// Add system variables
// Get default variables
variables = {
...getSystemVariable(data),
...externalProvider.externalWorkflowVariables,
...variables
...getSystemVariables(data)
};
}
@@ -846,23 +845,35 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
}
/* get system variable */
const getSystemVariable = ({
const getSystemVariables = ({
timezone,
runningAppInfo,
chatId,
responseChatItemId,
histories = [],
uid,
chatConfig
chatConfig,
variables
}: Props): SystemVariablesType => {
const variables = chatConfig?.variables || [];
const variablesMap = variables.reduce<Record<string, any>>((acc, item) => {
acc[item.key] = valueTypeFormat(item.defaultValue, item.valueType);
// Get global variables(Label -> key; Key -> key)
const globalVariables = chatConfig?.variables || [];
const variablesMap = globalVariables.reduce<Record<string, any>>((acc, item) => {
// API
if (variables[item.label] !== undefined) {
acc[item.key] = valueTypeFormat(variables[item.label], item.valueType);
}
// Web
else if (variables[item.key] !== undefined) {
acc[item.key] = valueTypeFormat(variables[item.key], item.valueType);
} else {
acc[item.key] = valueTypeFormat(item.defaultValue, item.valueType);
}
return acc;
}, {});
return {
...variablesMap,
// System var:
userId: uid,
appId: String(runningAppInfo.id),
chatId,
@@ -1,4 +1,7 @@
import { getPluginInputsFromStoreNodes } from '@fastgpt/global/core/app/plugin/utils';
import {
getPluginInputsFromStoreNodes,
splitCombinePluginId
} from '@fastgpt/global/core/app/plugin/utils';
import { chatValue2RuntimePrompt } from '@fastgpt/global/core/chat/adapt';
import { PluginSourceEnum } from '@fastgpt/global/core/app/plugin/constants';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
@@ -15,9 +18,8 @@ import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { computedPluginUsage } from '../../../app/plugin/utils';
import { filterSystemVariables, getNodeErrResponse } from '../utils';
import { getPluginRunUserQuery } from '@fastgpt/global/core/workflow/utils';
import type { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import type { NodeOutputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import { getChildAppRuntimeById, splitCombinePluginId } from '../../../app/plugin/controller';
import type { NodeInputKeyEnum, NodeOutputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import { getChildAppRuntimeById } from '../../../app/plugin/controller';
import { dispatchWorkFlow } from '../index';
import { getUserChatInfoAndAuthTeamPoints } from '../../../../support/permission/auth/team';
import { dispatchRunTool } from '../child/runTool';
@@ -66,7 +68,7 @@ export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPlugi
});
}
/*
/*
1. Team app
2. Admin selected system tool
*/
@@ -79,7 +81,7 @@ export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPlugi
per: ReadPermissionVal
});
plugin = await getChildAppRuntimeById(pluginId, version);
plugin = await getChildAppRuntimeById({ id: pluginId, versionId: version });
const outputFilterMap =
plugin.nodes
@@ -1,7 +1,7 @@
import { getErrText } from '@fastgpt/global/common/error/utils';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
import { NodeOutputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import { NodeInputKeyEnum, NodeOutputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import {
type RuntimeEdgeItemType,
type RuntimeNodeItemType,
@@ -17,7 +17,12 @@ import { getNanoid } from '@fastgpt/global/common/string/tools';
import { type SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
import { getMCPToolRuntimeNode } from '@fastgpt/global/core/app/mcpTools/utils';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
import type { McpToolSetDataType } from '@fastgpt/global/core/app/mcpTools/type';
import {
getSystemPluginRuntimeNodeById,
getSystemTools
} from '../../../core/app/plugin/controller';
import { MongoApp } from '../../../core/app/schema';
import { getMCPChildren } from '../../../core/app/mcp';
export const getWorkflowResponseWrite = ({
res,
@@ -151,10 +156,19 @@ export const formatHttpError = (error: any) => {
};
};
export const rewriteRuntimeWorkFlow = (
nodes: RuntimeNodeItemType[],
edges: RuntimeEdgeItemType[]
) => {
/**
* ToolSet node will be replaced by Children Tool Nodes.
* @param nodes
* @param edges
* @returns
*/
export const rewriteRuntimeWorkFlow = async ({
nodes,
edges
}: {
nodes: RuntimeNodeItemType[];
edges: RuntimeEdgeItemType[];
}) => {
const toolSetNodes = nodes.filter((node) => node.flowNodeType === FlowNodeTypeEnum.toolSet);
if (toolSetNodes.length === 0) {
@@ -165,35 +179,63 @@ export const rewriteRuntimeWorkFlow = (
for (const toolSetNode of toolSetNodes) {
nodeIdsToRemove.add(toolSetNode.nodeId);
const toolSetValue = toolSetNode.inputs[0]?.value as McpToolSetDataType | undefined;
if (!toolSetValue) continue;
const toolList = toolSetValue.toolList;
const url = toolSetValue.url;
const headerSecret = toolSetValue.headerSecret;
const systemToolId = toolSetNode.toolConfig?.systemToolSet?.toolId;
const mcpToolsetVal = toolSetNode.toolConfig?.mcpToolSet ?? toolSetNode.inputs[0].value;
const incomingEdges = edges.filter((edge) => edge.target === toolSetNode.nodeId);
for (const tool of toolList) {
const newToolNode = getMCPToolRuntimeNode({
avatar: toolSetNode.avatar,
tool,
url,
headerSecret
});
nodes.push({ ...newToolNode, name: `${toolSetNode.name} / ${tool.name}` });
const pushEdges = (nodeId: string) => {
for (const inEdge of incomingEdges) {
edges.push({
source: inEdge.source,
target: newToolNode.nodeId,
target: nodeId,
sourceHandle: inEdge.sourceHandle,
targetHandle: 'selectedTools',
status: inEdge.status
});
}
};
// systemTool
if (systemToolId) {
const toolsetInputConfig = toolSetNode.inputs.find(
(item) => item.key === NodeInputKeyEnum.systemInputConfig
);
const tools = await getSystemTools();
const children = tools.filter((item) => item.parentId === systemToolId);
for (const child of children) {
const toolListItem = toolSetNode.toolConfig?.systemToolSet?.toolList.find(
(item) => item.toolId === child.id
)!;
const newNode = await getSystemPluginRuntimeNodeById({
pluginId: child.id,
name: toolListItem?.name,
intro: toolListItem?.description
});
const newNodeInputConfig = newNode.inputs.find(
(item) => item.key === NodeInputKeyEnum.systemInputConfig
);
if (newNodeInputConfig) {
newNodeInputConfig.value = toolsetInputConfig?.value;
}
nodes.push(newNode);
pushEdges(newNode.nodeId);
}
} else if (mcpToolsetVal) {
const app = await MongoApp.findOne({ _id: toolSetNode.pluginId }).lean();
if (!app) continue;
const toolList = await getMCPChildren(app);
for (const tool of toolList) {
const newToolNode = getMCPToolRuntimeNode({
avatar: toolSetNode.avatar,
tool,
// New ?? Old
parentId: mcpToolsetVal.toolId ?? toolSetNode.pluginId
});
nodes.push({ ...newToolNode, name: `${toolSetNode.name}/${tool.name}` });
pushEdges(newToolNode.nodeId);
}
}
}
+1 -1
View File
@@ -3,7 +3,7 @@
"version": "1.0.0",
"type": "module",
"dependencies": {
"@fastgpt-sdk/plugin": "^0.1.2",
"@fastgpt-sdk/plugin": "^0.1.4",
"@fastgpt/global": "workspace:*",
"@modelcontextprotocol/sdk": "^1.12.1",
"@node-rs/jieba": "2.0.1",
@@ -10,10 +10,10 @@ import { AppPermission } from '@fastgpt/global/support/permission/app/controller
import { type PermissionValueType } from '@fastgpt/global/support/permission/type';
import { AppFolderTypeList } from '@fastgpt/global/core/app/constants';
import { type ParentIdType } from '@fastgpt/global/common/parentFolder/type';
import { splitCombinePluginId } from '../../../core/app/plugin/controller';
import { PluginSourceEnum } from '@fastgpt/global/core/app/plugin/constants';
import { type AuthModeType, type AuthResponseType } from '../type';
import { AppDefaultPermissionVal } from '@fastgpt/global/support/permission/app/constant';
import { splitCombinePluginId } from '@fastgpt/global/core/app/plugin/utils';
export const authPluginByTmbId = async ({
tmbId,
@@ -46,7 +46,7 @@ export const checkTeamAppLimit = async (teamId: string, amount = 1) => {
MongoApp.countDocuments({
teamId,
type: {
$in: [AppTypeEnum.simple, AppTypeEnum.workflow, AppTypeEnum.plugin, AppTypeEnum.tool]
$in: [AppTypeEnum.simple, AppTypeEnum.workflow, AppTypeEnum.plugin, AppTypeEnum.toolSet]
}
})
]);
@@ -59,7 +59,7 @@ export const checkTeamAppLimit = async (teamId: string, amount = 1) => {
if (global?.licenseData?.maxApps && typeof global?.licenseData?.maxApps === 'number') {
const totalApps = await MongoApp.countDocuments({
type: {
$in: [AppTypeEnum.simple, AppTypeEnum.workflow, AppTypeEnum.plugin, AppTypeEnum.tool]
$in: [AppTypeEnum.simple, AppTypeEnum.workflow, AppTypeEnum.plugin, AppTypeEnum.toolSet]
}
});
if (totalApps >= global.licenseData.maxApps) {
+20 -10
View File
@@ -139,17 +139,27 @@ export const useDoc2xServer = ({ apiKey }: { apiKey: string }) => {
// Finifsh
if (result_data.status === 'success') {
const cleanedText = result_data.result.pages
.map((page) => page.md)
.join('')
.replace(/\\[\(\)]/g, '$')
.replace(/\\[\[\]]/g, '$$')
.replace(/<img\s+src="([^"]+)"(?:\s*\?[^>]*)?(?:\s*\/>|>)/g, '![img]($1)')
.replace(/<!-- Media -->/g, '')
.replace(/<!-- Footnote -->/g, '')
.replace(/<!-- Meanless:[\s\S]*?-->/g, '')
.replace(/<!-- figureText:[\s\S]*?-->/g, '')
.replace(/\$(.+?)\s+\\tag\{(.+?)\}\$/g, '$$$1 \\qquad \\qquad ($2)$$')
.replace(/\\text\{([^}]*?)(\b\w+)_(\w+\b)([^}]*?)\}/g, '\\text{$1$2\\_$3$4}');
const remainingTags = cleanedText.match(/<!--[\s\S]*?-->/g);
if (remainingTags) {
addLog.warn(`[Doc2x] Remaining dirty tags after cleaning:`, {
count: remainingTags.length,
tags: remainingTags.slice(0, 3)
});
}
return {
text: result_data.result.pages
.map((page) => page.md)
.join('')
.replace(/\\[\(\)]/g, '$')
.replace(/\\[\[\]]/g, '$$')
.replace(/<img\s+src="([^"]+)"(?:\s*\?[^>]*)?(?:\s*\/>|>)/g, '![img]($1)')
.replace(/<!-- Media -->/g, '')
.replace(/<!-- Footnote -->/g, '')
.replace(/\$(.+?)\s+\\tag\{(.+?)\}\$/g, '$$$1 \\qquad \\qquad ($2)$$')
.replace(/\\text\{([^}]*?)(\b\w+)_(\w+\b)([^}]*?)\}/g, '\\text{$1$2\\_$3$4}'),
text: cleanedText,
pages: result_data.result.pages.length
};
}
@@ -0,0 +1,9 @@
import createClient from '@fastgpt-sdk/plugin';
export const BASE_URL = process.env.PLUGIN_BASE_URL || '';
export const TOKEN = process.env.PLUGIN_TOKEN || '';
export const pluginClient = createClient({
baseUrl: BASE_URL,
token: TOKEN
});
@@ -20,18 +20,16 @@ export const readXlsxRawText = async ({
const rawText = format2Csv.map((item) => item.csvText).join('\n');
const formatText = format2Csv
.map((item) => {
const csvArr = Papa.parse(item.csvText).data as string[][];
const header = csvArr[0];
const formatText = result
.map(({ data }) => {
const header = data[0];
if (!header) return;
const formatText = `| ${header.join(' | ')} |
| ${header.map(() => '---').join(' | ')} |
${csvArr
${data
.slice(1)
.map((row) => `| ${row.map((item) => item.replace(/\n/g, '\\n')).join(' | ')} |`)
.map((row) => `| ${row.map((cell) => String(cell).replace(/\n/g, '\\n')).join(' | ')} |`)
.join('\n')}`;
return formatText;
@@ -2,6 +2,7 @@ import { parentPort } from 'worker_threads';
import type { SplitProps } from '@fastgpt/global/common/string/textSplitter';
import { splitText2Chunks } from '@fastgpt/global/common/string/textSplitter';
import { workerResponse } from '../controller';
import { delay } from '@fastgpt/global/common/system/utils';
parentPort?.on('message', async (props: SplitProps) => {
const result = splitText2Chunks(props);