Mirror of https://github.com/labring/FastGPT.git (synced 2025-10-20 02:34:52 +00:00)
Fix history in classifyQuestion and extract modules (#1012)
* Fix history in classifyQuestion and extract modules
* Add chatValue2RuntimePrompt import and update text formatting
@@ -24,6 +24,7 @@ import {
   ChatCompletionTool
 } from '@fastgpt/global/core/ai/type';
 import { DispatchNodeResultType } from '@fastgpt/global/core/module/runtime/type';
+import { chatValue2RuntimePrompt } from '@fastgpt/global/core/chat/adapt';

 type Props = ModuleDispatchProps<{
   [ModuleInputKeyEnum.aiModel]: string;
@@ -125,7 +126,7 @@ const getFunctionCallSchema = ({
       ? `<背景知识>
 ${systemPrompt}
 </背景知识>

 问题: "${userChatInput}"
 `
       : userChatInput
@@ -284,7 +285,9 @@ const completions = async ({
       typeList: agents
         .map((item) => `{"questionType": "${item.value}", "typeId": "${item.key}"}`)
         .join('\n'),
-      history: histories.map((item) => `${item.obj}:${item.value}`).join('\n'),
+      history: histories
+        .map((item) => `${item.obj}:${chatValue2RuntimePrompt(item.value).text}`)
+        .join('\n'),
       question: userChatInput
     })
   }
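The two hunks above touch the classifyQuestion module; the hunks below apply the same pattern to the extract module. The substance of the fix is the history formatting: `item.value` is now passed through chatValue2RuntimePrompt(...).text instead of being interpolated directly, which suggests history values are structured chat items rather than plain strings and would otherwise render as "[object Object]" in the prompt. A minimal sketch of the before/after behaviour, using simplified stand-in types (the real ChatItemValueItemType shape and the real chatValue2RuntimePrompt in @fastgpt/global/core/chat/adapt are assumptions here, not copied from the repository):

// Simplified stand-in for a structured chat value item (assumed shape, illustration only).
type ChatValueItem = { type: 'text'; text?: { content: string } };

type HistoryItem = {
  obj: 'Human' | 'AI' | 'System'; // chat role
  value: ChatValueItem[]; // structured value, not a plain string
};

// Assumed behaviour of chatValue2RuntimePrompt: collect the text parts of a chat value.
function chatValue2RuntimePrompt(value: ChatValueItem[]): { text: string } {
  return { text: value.map((v) => v.text?.content ?? '').join('\n') };
}

const histories: HistoryItem[] = [
  { obj: 'Human', value: [{ type: 'text', text: { content: '你好' } }] },
  { obj: 'AI', value: [{ type: 'text', text: { content: 'Hi, how can I help?' } }] }
];

// Before the fix: the structured value is coerced to a string.
const broken = histories.map((item) => `${item.obj}:${item.value}`).join('\n');
// -> "Human:[object Object]\nAI:[object Object]"

// After the fix: extract the plain text first, as the diff now does.
const fixed = histories
  .map((item) => `${item.obj}:${chatValue2RuntimePrompt(item.value).text}`)
  .join('\n');
// -> "Human:你好\nAI:Hi, how can I help?"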
@@ -25,6 +25,7 @@ import {
 } from '@fastgpt/global/core/ai/type';
 import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/global/core/ai/constants';
 import { DispatchNodeResultType } from '@fastgpt/global/core/module/runtime/type';
+import { chatValue2RuntimePrompt } from '@fastgpt/global/core/chat/adapt';

 type Props = ModuleDispatchProps<{
   [ModuleInputKeyEnum.history]?: ChatItemType[];
@@ -160,7 +161,7 @@ const getFunctionCallSchema = ({
 - 字符串不要换行。
 - 结合上下文和当前问题进行获取。
 """

 当前问题: "${content}"`
     }
   }
@@ -325,8 +326,8 @@ const completions = async ({
         }}`
       )
       .join('\n'),
-    text: `${histories.map((item) => `${item.obj}:${item.value}`).join('\n')}
+    text: `${histories.map((item) => `${item.obj}:${chatValue2RuntimePrompt(item.value).text}`).join('\n')}
 Human: ${content}`
   })
 }
 }
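In the extract module the same substitution lands inside a multi-line template literal that builds the prompt body. A hedged sketch of how that `text` value resolves once each history item has been rendered to plain text (the history lines and the current question below are illustrative values, not taken from the repository):

// Pretend these lines were produced by
// `${item.obj}:${chatValue2RuntimePrompt(item.value).text}` for each history item.
const renderedHistory = ['Human:你好', 'AI:Hi, how can I help?'];
const content = '帮我提取这段话中的订单号'; // hypothetical current question

const text = `${renderedHistory.join('\n')}
Human: ${content}`;

console.log(text);
// Human:你好
// AI:Hi, how can I help?
// Human: 帮我提取这段话中的订单号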