mirror of https://github.com/labring/FastGPT.git
synced 2025-07-21 11:43:56 +00:00

update prompt version (#4242)

* sync collection
* remove lock
* update prompt version
@@ -1,13 +1,13 @@
 import { PromptTemplateItem } from '../type.d';
 import { i18nT } from '../../../../web/i18n/utils';
-import { getPromptByVersion } from './agent';
+import { getPromptByVersion } from './utils';
 
 export const Prompt_QuoteTemplateList: PromptTemplateItem[] = [
   {
     title: i18nT('app:template.standard_template'),
     desc: i18nT('app:template.standard_template_des'),
     value: {
-      ['4.9.0']: `{
+      ['4.9.2']: `{
   "sourceName": "{{source}}",
   "updateTime": "{{updateTime}}",
   "content": "{{q}}\n{{a}}"
@@ -19,7 +19,7 @@ export const Prompt_QuoteTemplateList: PromptTemplateItem[] = [
     title: i18nT('app:template.qa_template'),
     desc: i18nT('app:template.qa_template_des'),
     value: {
-      ['4.9.0']: `<Question>
+      ['4.9.2']: `<Question>
 {{q}}
 </Question>
 <Answer>
@@ -31,7 +31,7 @@ export const Prompt_QuoteTemplateList: PromptTemplateItem[] = [
     title: i18nT('app:template.standard_strict'),
     desc: i18nT('app:template.standard_strict_des'),
     value: {
-      ['4.9.0']: `{
+      ['4.9.2']: `{
   "sourceName": "{{source}}",
   "updateTime": "{{updateTime}}",
   "content": "{{q}}\n{{a}}"
@@ -43,7 +43,7 @@ export const Prompt_QuoteTemplateList: PromptTemplateItem[] = [
     title: i18nT('app:template.hard_strict'),
     desc: i18nT('app:template.hard_strict_des'),
     value: {
-      ['4.9.0']: `<Question>
+      ['4.9.2']: `<Question>
 {{q}}
 </Question>
 <Answer>
@@ -64,7 +64,7 @@ export const Prompt_userQuotePromptList: PromptTemplateItem[] = [
     title: i18nT('app:template.standard_template'),
     desc: '',
     value: {
-      ['4.9.0']: `使用 <Reference></Reference> 标记中的内容作为本次对话的参考:
+      ['4.9.2']: `使用 <Reference></Reference> 标记中的内容作为本次对话的参考:
 
 <Reference>
 {{quote}}
@@ -84,7 +84,7 @@ export const Prompt_userQuotePromptList: PromptTemplateItem[] = [
     title: i18nT('app:template.qa_template'),
     desc: '',
     value: {
-      ['4.9.0']: `使用 <QA></QA> 标记中的问答对进行回答。
+      ['4.9.2']: `使用 <QA></QA> 标记中的问答对进行回答。
 
 <QA>
 {{quote}}
@@ -103,7 +103,7 @@ export const Prompt_userQuotePromptList: PromptTemplateItem[] = [
     title: i18nT('app:template.standard_strict'),
     desc: '',
     value: {
-      ['4.9.0']: `忘记你已有的知识,仅使用 <Reference></Reference> 标记中的内容作为本次对话的参考:
+      ['4.9.2']: `忘记你已有的知识,仅使用 <Reference></Reference> 标记中的内容作为本次对话的参考:
 
 <Reference>
 {{quote}}
@@ -127,7 +127,7 @@ export const Prompt_userQuotePromptList: PromptTemplateItem[] = [
     title: i18nT('app:template.hard_strict'),
     desc: '',
     value: {
-      ['4.9.0']: `忘记你已有的知识,仅使用 <QA></QA> 标记中的问答对进行回答。
+      ['4.9.2']: `忘记你已有的知识,仅使用 <QA></QA> 标记中的问答对进行回答。
 
 <QA>
 {{quote}}
@@ -157,7 +157,7 @@ export const Prompt_systemQuotePromptList: PromptTemplateItem[] = [
     title: i18nT('app:template.standard_template'),
     desc: '',
     value: {
-      ['4.9.0']: `使用 <Reference></Reference> 标记中的内容作为本次对话的参考:
+      ['4.9.2']: `使用 <Reference></Reference> 标记中的内容作为本次对话的参考:
 
 <Reference>
 {{quote}}
@@ -175,7 +175,7 @@ export const Prompt_systemQuotePromptList: PromptTemplateItem[] = [
     title: i18nT('app:template.qa_template'),
     desc: '',
     value: {
-      ['4.9.0']: `使用 <QA></QA> 标记中的问答对进行回答。
+      ['4.9.2']: `使用 <QA></QA> 标记中的问答对进行回答。
 
 <QA>
 {{quote}}
@@ -192,7 +192,7 @@ export const Prompt_systemQuotePromptList: PromptTemplateItem[] = [
     title: i18nT('app:template.standard_strict'),
     desc: '',
     value: {
-      ['4.9.0']: `忘记你已有的知识,仅使用 <Reference></Reference> 标记中的内容作为本次对话的参考:
+      ['4.9.2']: `忘记你已有的知识,仅使用 <Reference></Reference> 标记中的内容作为本次对话的参考:
 
 <Reference>
 {{quote}}
@@ -214,7 +214,7 @@ export const Prompt_systemQuotePromptList: PromptTemplateItem[] = [
     title: i18nT('app:template.hard_strict'),
     desc: '',
     value: {
-      ['4.9.0']: `忘记你已有的知识,仅使用 <QA></QA> 标记中的问答对进行回答。
+      ['4.9.2']: `忘记你已有的知识,仅使用 <QA></QA> 标记中的问答对进行回答。
 
 <QA>
 {{quote}}
@@ -249,7 +249,7 @@ export const getQuotePrompt = (version?: string, role: 'user' | 'system' = 'user
 // Document quote prompt
 export const getDocumentQuotePrompt = (version: string) => {
   const promptMap = {
-    ['4.9.0']: `将 <FilesContent></FilesContent> 中的内容作为本次对话的参考:
+    ['4.9.2']: `将 <FilesContent></FilesContent> 中的内容作为本次对话的参考:
 <FilesContent>
 {{quote}}
 </FilesContent>
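For readers skimming the template changes above: each quote template is stored under a version key, and its {{...}} placeholders are filled per retrieved quote at runtime. A minimal sketch of that substitution, assuming a hypothetical renderQuoteTemplate helper that is not part of this diff:

// Sketch only: renderQuoteTemplate is hypothetical and not part of this commit;
// it just shows how the {{...}} placeholders in the templates above are filled.
const renderQuoteTemplate = (template: string, vars: Record<string, string>) =>
  template.replace(/\{\{(\w+)\}\}/g, (_match, key: string) => vars[key] ?? '');

const standardTemplate = `{
  "sourceName": "{{source}}",
  "updateTime": "{{updateTime}}",
  "content": "{{q}}\n{{a}}"
}`;

// One retrieved quote rendered with the standard template:
const rendered = renderQuoteTemplate(standardTemplate, {
  source: 'handbook.pdf',
  updateTime: '2025-03-20',
  q: 'What is FastGPT?',
  a: 'A knowledge-base QA platform.'
});
console.log(rendered);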
@@ -1,3 +1,5 @@
+import { getPromptByVersion } from './utils';
+
 export const Prompt_AgentQA = {
   description: `<Context></Context> 标记中是一段文本,学习和分析它,并整理学习成果:
 - 提出问题并给出每个问题的答案。
@@ -25,29 +27,9 @@ A2:
 `
 };
 
-export const getPromptByVersion = (version?: string, promptMap: Record<string, string> = {}) => {
-  const versions = Object.keys(promptMap).sort((a, b) => {
-    const [majorA, minorA, patchA] = a.split('.').map(Number);
-    const [majorB, minorB, patchB] = b.split('.').map(Number);
-
-    if (majorA !== majorB) return majorB - majorA;
-    if (minorA !== minorB) return minorB - minorA;
-    return patchB - patchA;
-  });
-
-  if (!version) {
-    return promptMap[versions[0]];
-  }
-
-  if (version in promptMap) {
-    return promptMap[version];
-  }
-  return promptMap[versions[versions.length - 1]];
-};
-
 export const getExtractJsonPrompt = (version?: string) => {
   const promptMap: Record<string, string> = {
-    ['4.8.1']: `你可以从 <对话记录></对话记录> 中提取指定 Json 信息,你仅需返回 Json 字符串,无需回答问题。
+    ['4.9.2']: `你可以从 <对话记录></对话记录> 中提取指定 Json 信息,你仅需返回 Json 字符串,无需回答问题。
 <提取要求>
 {{description}}
 </提取要求>
@@ -74,7 +56,7 @@ export const getExtractJsonPrompt = (version?: string) => {
 
 export const getExtractJsonToolPrompt = (version?: string) => {
   const promptMap: Record<string, string> = {
-    ['4.8.1']: `我正在执行一个函数,需要你提供一些参数,请以 JSON 字符串格式返回这些参数,要求:
+    ['4.9.2']: `我正在执行一个函数,需要你提供一些参数,请以 JSON 字符串格式返回这些参数,要求:
 """
 - {{description}}
 - 不是每个参数都是必须生成的,如果没有合适的参数值,不要生成该参数,或返回空字符串。
@@ -90,7 +72,7 @@ export const getExtractJsonToolPrompt = (version?: string) => {
 
 export const getCQPrompt = (version?: string) => {
   const promptMap: Record<string, string> = {
-    ['4.8.1']: `请帮我执行一个"问题分类"任务,将问题分类为以下几种类型之一:
+    ['4.9.2']: `请帮我执行一个"问题分类"任务,将问题分类为以下几种类型之一:
 
 """
 {{typeList}}
@@ -114,8 +96,7 @@ export const getCQPrompt = (version?: string) => {
   return getPromptByVersion(version, promptMap);
 };
 
-export const getQuestionGuidePrompt = () => {
-  return `You are an AI assistant tasked with predicting the user's next question based on the conversation history. Your goal is to generate 3 potential questions that will guide the user to continue the conversation. When generating these questions, adhere to the following rules:
+export const QuestionGuidePrompt = `You are an AI assistant tasked with predicting the user's next question based on the conversation history. Your goal is to generate 3 potential questions that will guide the user to continue the conversation. When generating these questions, adhere to the following rules:
 
 1. Use the same language as the user's last question in the conversation history.
 2. Keep each question under 20 characters in length.
@@ -123,8 +104,5 @@ export const getQuestionGuidePrompt = () => {
 Analyze the conversation history provided to you and use it as context to generate relevant and engaging follow-up questions. Your predictions should be logical extensions of the current topic or related areas that the user might be interested in exploring further.
 
 Remember to maintain consistency in tone and style with the existing conversation while providing diverse options for the user to choose from. Your goal is to keep the conversation flowing naturally and help the user delve deeper into the subject matter or explore related topics.`;
-};
 
-export const getQuestionGuideFooterPrompt = () => {
-  return `Please strictly follow the format rules: \nReturn questions in JSON format: ['Question 1', 'Question 2', 'Question 3']. Your output: `;
-};
+export const QuestionGuideFooterPrompt = `Please strictly follow the format rules: \nReturn questions in JSON format: ['Question 1', 'Question 2', 'Question 3']. Your output: `;
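The question-guide prompts above never varied by version, so the zero-argument getters become plain exported constants and call sites drop the parentheses. A minimal before/after sketch, with the surrounding call-site code abbreviated rather than copied exactly:

import {
  QuestionGuidePrompt,
  QuestionGuideFooterPrompt
} from '@fastgpt/global/core/ai/prompt/agent';

// Before: the prompt text came from zero-argument getters.
// const content = `${getQuestionGuidePrompt()}\n${getQuestionGuideFooterPrompt()}`;

// After: the exported constants are interpolated directly.
const content = `${QuestionGuidePrompt}\n${QuestionGuideFooterPrompt}`;
console.log(content);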
packages/global/core/ai/prompt/utils.ts (new file, 19 lines)
@@ -0,0 +1,19 @@
+export const getPromptByVersion = (version?: string, promptMap: Record<string, string> = {}) => {
+  const versions = Object.keys(promptMap).sort((a, b) => {
+    const [majorA, minorA, patchA] = a.split('.').map(Number);
+    const [majorB, minorB, patchB] = b.split('.').map(Number);
+
+    if (majorA !== majorB) return majorB - majorA;
+    if (minorA !== minorB) return minorB - minorA;
+    return patchB - patchA;
+  });
+
+  if (!version) {
+    return promptMap[versions[0]];
+  }
+
+  if (version in promptMap) {
+    return promptMap[version];
+  }
+  return promptMap[versions[versions.length - 1]];
+};
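A usage sketch of the selection rules implemented above: keys sort as dotted numeric versions, newest first; with no version the newest entry is returned, an exact match returns that entry, and an unknown version falls back to the last element of the descending list, i.e. the oldest entry. The map below is illustrative only, and the import path assumes utils.ts is exposed under the same package root as agent.ts:

// Illustrative only: this prompt map is not from the codebase.
import { getPromptByVersion } from '@fastgpt/global/core/ai/prompt/utils';

const promptMap: Record<string, string> = {
  ['4.8.1']: 'legacy prompt',
  ['4.9.2']: 'current prompt'
};

getPromptByVersion(undefined, promptMap); // 'current prompt' (no version requested, newest key wins)
getPromptByVersion('4.8.1', promptMap); // 'legacy prompt' (exact match)
getPromptByVersion('5.0.0', promptMap); // 'legacy prompt' (unknown version falls back to the oldest key)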
@@ -55,7 +55,7 @@ export const AiChatModule: FlowNodeTemplateType = {
   showStatus: true,
   isTool: true,
   courseUrl: '/docs/guide/workbench/workflow/ai_chat/',
-  version: '490',
+  version: '4.9.0',
   inputs: [
     Input_Template_SettingAiModel,
     // --- settings modal
@@ -30,7 +30,7 @@ export const ClassifyQuestionModule: FlowNodeTemplateType = {
   name: i18nT('workflow:question_classification'),
   intro: i18nT('workflow:intro_question_classification'),
   showStatus: true,
-  version: '481',
+  version: '4.9.2',
   courseUrl: '/docs/guide/workbench/workflow/question_classify/',
   inputs: [
     {
@@ -27,7 +27,7 @@ export const ContextExtractModule: FlowNodeTemplateType = {
   showStatus: true,
   isTool: true,
   courseUrl: '/docs/guide/workbench/workflow/content_extract/',
-  version: '481',
+  version: '4.9.2',
   inputs: [
     {
       ...Input_Template_SelectAIModel,
@@ -31,7 +31,7 @@ export const DatasetSearchModule: FlowNodeTemplateType = {
   showStatus: true,
   isTool: true,
   courseUrl: '/docs/guide/workbench/workflow/dataset_search/',
-  version: '481',
+  version: '4.9.2',
   inputs: [
     {
       key: NodeInputKeyEnum.datasetSelectList,
@@ -23,7 +23,7 @@ export const ReadFilesNode: FlowNodeTemplateType = {
   name: i18nT('app:workflow.read_files'),
   intro: i18nT('app:workflow.read_files_tip'),
   showStatus: true,
-  version: '4812',
+  version: '4.9.2',
   isTool: false,
   courseUrl: '/docs/guide/course/fileinput/',
   inputs: [
@@ -33,7 +33,7 @@ export const ToolModule: FlowNodeTemplateType = {
   intro: i18nT('workflow:template.tool_call_intro'),
   showStatus: true,
   courseUrl: '/docs/guide/workbench/workflow/tool/',
-  version: '4813',
+  version: '4.9.2',
   inputs: [
     {
       ...Input_Template_SettingAiModel,
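These workflow node templates now record their version in the same dotted form as the prompt map keys above. An abbreviated, illustrative fragment (most required FlowNodeTemplateType fields are omitted):

// Abbreviated, illustrative fragment; not a complete FlowNodeTemplateType.
const exampleNodeVersion = {
  showStatus: true,
  isTool: true,
  courseUrl: '/docs/guide/workbench/workflow/ai_chat/',
  version: '4.9.0' // previously the compact code '490'
};
console.log(exampleNodeVersion.version);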
@@ -4,8 +4,8 @@ import { countGptMessagesTokens, countPromptTokens } from '../../../common/strin
 import { loadRequestMessages } from '../../chat/utils';
 import { llmCompletionsBodyFormat } from '../utils';
 import {
-  getQuestionGuideFooterPrompt,
-  getQuestionGuidePrompt
+  QuestionGuidePrompt,
+  QuestionGuideFooterPrompt
 } from '@fastgpt/global/core/ai/prompt/agent';
 import { addLog } from '../../../common/system/log';
 import json5 from 'json5';
@@ -27,7 +27,7 @@ export async function createQuestionGuide({
     ...messages,
     {
       role: 'user',
-      content: `${customPrompt || getQuestionGuidePrompt()}\n${getQuestionGuideFooterPrompt()}`
+      content: `${customPrompt || QuestionGuidePrompt}\n${QuestionGuideFooterPrompt}`
     }
   ];
   const requestMessages = await loadRequestMessages({
@@ -14,8 +14,8 @@ import { useSystemStore } from '@/web/common/system/useSystemStore';
 import AIModelSelector from '@/components/Select/AIModelSelector';
 import CustomPromptEditor from '@fastgpt/web/components/common/Textarea/CustomPromptEditor';
 import {
-  getQuestionGuideFooterPrompt,
-  getQuestionGuidePrompt
+  QuestionGuideFooterPrompt,
+  QuestionGuidePrompt
 } from '@fastgpt/global/core/ai/prompt/agent';
 
 // question generator config
@@ -168,7 +168,7 @@ const QGConfigModal = ({
           }
         }}
       >
-        {customPrompt || getQuestionGuidePrompt()}
+        {customPrompt || QuestionGuidePrompt}
       </Box>
     </Box>
   </>
@@ -178,8 +178,8 @@ const QGConfigModal = ({
     {isOpenCustomPrompt && (
       <CustomPromptEditor
         defaultValue={customPrompt}
-        defaultPrompt={getQuestionGuidePrompt()}
-        footerPrompt={getQuestionGuideFooterPrompt()}
+        defaultPrompt={QuestionGuidePrompt}
+        footerPrompt={QuestionGuideFooterPrompt}
         onChange={(e) => {
           onChange({
             ...value,