From 6d4b331db9dc7d315ba74a3238d248fe21236f8c Mon Sep 17 00:00:00 2001
From: Fengrui Liu
Date: Mon, 18 Mar 2024 19:47:40 +0800
Subject: [PATCH] Fix history in classifyQuestion and extract modules (#1012)

* Fix history in classifyQuestion and extract modules

* Add chatValue2RuntimePrompt import and update text formatting
---
 .../src/service/moduleDispatch/agent/classifyQuestion.ts | 7 +++++--
 projects/app/src/service/moduleDispatch/agent/extract.ts | 7 ++++---
 2 files changed, 9 insertions(+), 5 deletions(-)

diff --git a/projects/app/src/service/moduleDispatch/agent/classifyQuestion.ts b/projects/app/src/service/moduleDispatch/agent/classifyQuestion.ts
index 6db1d3f28..97b5e5e3f 100644
--- a/projects/app/src/service/moduleDispatch/agent/classifyQuestion.ts
+++ b/projects/app/src/service/moduleDispatch/agent/classifyQuestion.ts
@@ -24,6 +24,7 @@ import {
   ChatCompletionTool
 } from '@fastgpt/global/core/ai/type';
 import { DispatchNodeResultType } from '@fastgpt/global/core/module/runtime/type';
+import { chatValue2RuntimePrompt } from '@fastgpt/global/core/chat/adapt';
 
 type Props = ModuleDispatchProps<{
   [ModuleInputKeyEnum.aiModel]: string;
@@ -125,7 +126,7 @@ const getFunctionCallSchema = ({
       ? `<背景知识>
 ${systemPrompt}
-      
+
 问题: "${userChatInput}"
 `
       : userChatInput
@@ -284,7 +285,9 @@ const completions = async ({
       typeList: agents
         .map((item) => `{"questionType": "${item.value}", "typeId": "${item.key}"}`)
         .join('\n'),
-      history: histories.map((item) => `${item.obj}:${item.value}`).join('\n'),
+      history: histories
+        .map((item) => `${item.obj}:${chatValue2RuntimePrompt(item.value).text}`)
+        .join('\n'),
       question: userChatInput
     })
   }
diff --git a/projects/app/src/service/moduleDispatch/agent/extract.ts b/projects/app/src/service/moduleDispatch/agent/extract.ts
index db6b13359..a76fe551b 100644
--- a/projects/app/src/service/moduleDispatch/agent/extract.ts
+++ b/projects/app/src/service/moduleDispatch/agent/extract.ts
@@ -25,6 +25,7 @@ import {
 } from '@fastgpt/global/core/ai/type';
 import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/global/core/ai/constants';
 import { DispatchNodeResultType } from '@fastgpt/global/core/module/runtime/type';
+import { chatValue2RuntimePrompt } from '@fastgpt/global/core/chat/adapt';
 
 type Props = ModuleDispatchProps<{
   [ModuleInputKeyEnum.history]?: ChatItemType[];
@@ -160,7 +161,7 @@ const getFunctionCallSchema = ({
 - 字符串不要换行。
 - 结合上下文和当前问题进行获取。
 """
-      
+
 当前问题: "${content}"`
       }
     }
@@ -325,8 +326,8 @@ const completions = async ({
 }}`
         )
         .join('\n'),
-      text: `${histories.map((item) => `${item.obj}:${item.value}`).join('\n')}
-      Human: ${content}`
+      text: `${histories.map((item) => `${item.obj}:${chatValue2RuntimePrompt(item.value).text}`).join('\n')}
+Human: ${content}`
     })
   }
 }
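
Background for reviewers (a sketch, not part of the patch itself): after the chat value refactor, each history item's `value` is presumably a structured array of value items rather than a plain string, so the old `${item.obj}:${item.value}` interpolation rendered as "[object Object]" in the classification and extraction prompts. `chatValue2RuntimePrompt(value).text` flattens the array back to plain text. The TypeScript sketch below illustrates the idea with simplified stand-in types; the real `ChatItemType` / value item definitions and the actual helper live in `@fastgpt/global` and may differ in detail.

// Simplified stand-ins for the @fastgpt/global chat types (assumption: the real
// value items also carry file payloads and richer metadata).
type ChatItemValueItem = { type: 'text' | 'file'; text?: { content: string } };
type ChatItem = { obj: 'Human' | 'AI' | 'System'; value: ChatItemValueItem[] };

// Rough equivalent of what chatValue2RuntimePrompt(value).text is used for here:
// concatenate the text parts of a structured chat value into one plain string.
const valueToText = (value: ChatItemValueItem[]): string =>
  value
    .filter((item) => item.type === 'text' && item.text)
    .map((item) => item.text!.content)
    .join('\n');

const histories: ChatItem[] = [
  { obj: 'Human', value: [{ type: 'text', text: { content: 'Recommend a book.' } }] },
  { obj: 'AI', value: [{ type: 'text', text: { content: 'Try "The Three-Body Problem".' } }] }
];

// Before the patch: `${item.obj}:${item.value}` stringifies the array, giving
// "Human:[object Object]". After the patch the prompt sees the actual text:
const history = histories
  .map((item) => `${item.obj}:${valueToText(item.value)}`)
  .join('\n');

console.log(history);
// Human:Recommend a book.
// AI:Try "The Three-Body Problem".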