feat: qwen qwq model config (#4008)

This commit is contained in:
Archer
2025-03-06 20:18:33 +08:00
committed by GitHub
parent 54fde7630c
commit a1b114e426
5 changed files with 86 additions and 37 deletions

View File

@@ -173,6 +173,7 @@ curl --location --request POST 'https://{{host}}/api/admin/initv490' \
1. PDF增强解析交互添加到页面上。同时内嵌 Doc2x 服务,可直接使用 Doc2x 服务解析 PDF 文件。
2. 图片自动标注,同时修改知识库文件上传部分数据逻辑和交互。
3. pg vector 插件升级 0.8.0 版本,引入迭代搜索,减少部分数据无法被检索的情况。
+4. 新增 qwen-qwq 系列模型配置。
## ⚙️ 优化

View File

@@ -122,6 +122,54 @@
"showTopP": true,
"showStopSign": true
},
{
"model": "qwq-plus",
"name": "qwq-plus",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": null,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": false,
"usedInClassify": false,
"customCQPrompt": "",
"usedInExtractFields": false,
"usedInQueryExtension": false,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": false,
"showStopSign": false
},
{
"model": "qwq-32b",
"name": "qwq-32b",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": null,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": false,
"usedInClassify": false,
"customCQPrompt": "",
"usedInExtractFields": false,
"usedInQueryExtension": false,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": false,
"showStopSign": false
},
{
"model": "qwen-coder-turbo",
"name": "qwen-coder-turbo",

View File

@@ -264,7 +264,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
}
})();
-if (!answerText) {
+if (!answerText && !reasoningText) {
return Promise.reject(getEmptyResponseTip());
}

View File

@@ -80,8 +80,10 @@ const testLLMModel = async (model: LLMModelItemType) => {
});
const responseText = response.choices?.[0]?.message?.content;
+// @ts-ignore
+const reasoning_content = response.choices?.[0]?.message?.reasoning_content;
-if (!responseText) {
+if (!responseText && !reasoning_content) {
return Promise.reject('Model response empty');
}

View File

@@ -204,7 +204,6 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
});
// save chat
-if (!res.closed) {
const isInteractiveRequest = !!getLastInteractiveValue(histories);
const { text: userInteractiveVal } = chatValue2RuntimePrompt(userQuestion.value);
@@ -242,7 +241,6 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
content: [userQuestion, aiResponse]
});
}
-}
createChatUsage({
appName,