feat: node prompt version (#4141)
* feat: node prompt version
* fix
* delete unused code
* fix
* fix code
@@ -4,8 +4,8 @@ import { countGptMessagesTokens, countPromptTokens } from '../../../common/strin
 import { loadRequestMessages } from '../../chat/utils';
 import { llmCompletionsBodyFormat } from '../utils';
 import {
-  PROMPT_QUESTION_GUIDE,
-  PROMPT_QUESTION_GUIDE_FOOTER
+  getQuestionGuideFooterPrompt,
+  getQuestionGuidePrompt
 } from '@fastgpt/global/core/ai/prompt/agent';
 import { addLog } from '../../../common/system/log';
 import json5 from 'json5';
@@ -27,7 +27,7 @@ export async function createQuestionGuide({
     ...messages,
     {
       role: 'user',
-      content: `${customPrompt || PROMPT_QUESTION_GUIDE}\n${PROMPT_QUESTION_GUIDE_FOOTER}`
+      content: `${customPrompt || getQuestionGuidePrompt()}\n${getQuestionGuideFooterPrompt()}`
     }
   ];
   const requestMessages = await loadRequestMessages({

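Note that the question-guide getters are called with no arguments, unlike the version-aware getters in the hunks below. A minimal sketch of what they might look like, assuming simple wrappers around module-level constants — the actual implementation in @fastgpt/global/core/ai/prompt/agent is not part of this diff, and the prompt wording below is a placeholder:

// Hypothetical sketch; the real texts live in @fastgpt/global/core/ai/prompt/agent.
const QUESTION_GUIDE_PROMPT = 'Suggest three short follow-up questions the user is likely to ask next.';
const QUESTION_GUIDE_FOOTER = 'Return the questions as a JSON array of strings, e.g. ["q1", "q2", "q3"].';

export const getQuestionGuidePrompt = (): string => QUESTION_GUIDE_PROMPT;
export const getQuestionGuideFooterPrompt = (): string => QUESTION_GUIDE_FOOTER;
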
@@ -10,8 +10,7 @@ import type { ClassifyQuestionAgentItemType } from '@fastgpt/global/core/workflo
 import { NodeInputKeyEnum, NodeOutputKeyEnum } from '@fastgpt/global/core/workflow/constants';
 import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
 import type { ModuleDispatchProps } from '@fastgpt/global/core/workflow/runtime/type';
-import { replaceVariable } from '@fastgpt/global/common/string/tools';
-import { Prompt_CQJson } from '@fastgpt/global/core/ai/prompt/agent';
+import { getCQPrompt } from '@fastgpt/global/core/ai/prompt/agent';
 import { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
 import { getLLMModel } from '../../../ai/model';
 import { getHistories } from '../utils';
@@ -23,6 +22,7 @@ import { loadRequestMessages } from '../../../chat/utils';
 import { llmCompletionsBodyFormat } from '../../../ai/utils';
 import { addLog } from '../../../../common/system/log';
 import { ModelTypeEnum } from '../../../../../global/core/ai/model';
+import { replaceVariable } from '@fastgpt/global/common/string/tools';
 
 type Props = ModuleDispatchProps<{
   [NodeInputKeyEnum.aiModel]: string;
@@ -99,7 +99,8 @@ const completions = async ({
   cqModel,
   externalProvider,
   histories,
-  params: { agents, systemPrompt = '', userChatInput }
+  params: { agents, systemPrompt = '', userChatInput },
+  node: { version }
 }: ActionProps) => {
   const messages: ChatItemType[] = [
     {
@@ -108,7 +109,7 @@ const completions = async ({
     {
       type: ChatItemValueTypeEnum.text,
       text: {
-        content: replaceVariable(cqModel.customCQPrompt || Prompt_CQJson, {
+        content: replaceVariable(cqModel.customCQPrompt || getCQPrompt(version), {
           systemPrompt: systemPrompt || 'null',
           typeList: agents
             .map((item) => `{"类型ID":"${item.key}", "问题类型":"${item.value}"}`)

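This hunk shows the commit's recurring pattern: a former prompt constant (here Prompt_CQJson) becomes a getter keyed on the node's version, so workflows built against an older prompt keep the wording they were tested with. A sketch of how such a getter might be structured, assuming a version-keyed map with a latest-version fallback — the version keys and texts below are invented, and the real registry in @fastgpt/global/core/ai/prompt/agent is not shown in this diff:

// Hypothetical sketch; keys and prompt texts are placeholders.
const cqPromptMap: Record<string, string> = {
  '4.8.1': 'Classify the question into one of the listed types... (older wording)',
  '4.9.0': 'Classify the question into one of the listed types... (newer wording)'
};
const latestCQVersion = '4.9.0';

export const getCQPrompt = (version?: string): string =>
  (version && cqPromptMap[version]) || cqPromptMap[latestCQVersion];

The call site stays a one-liner, with a user-supplied custom prompt always taking precedence: replaceVariable(cqModel.customCQPrompt || getCQPrompt(version), {...}).
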
@@ -16,7 +16,6 @@ import {
 } from '@fastgpt/global/core/workflow/constants';
 import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
 import type { ModuleDispatchProps } from '@fastgpt/global/core/workflow/runtime/type';
-import { Prompt_ExtractJson } from '@fastgpt/global/core/ai/prompt/agent';
 import { replaceVariable, sliceJsonStr } from '@fastgpt/global/common/string/tools';
 import { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
 import { getHistories } from '../utils';
@@ -33,6 +32,10 @@ import { DispatchNodeResultType } from '@fastgpt/global/core/workflow/runtime/ty
 import { chatValue2RuntimePrompt } from '@fastgpt/global/core/chat/adapt';
 import { llmCompletionsBodyFormat } from '../../../ai/utils';
 import { ModelTypeEnum } from '../../../../../global/core/ai/model';
+import {
+  getExtractJsonPrompt,
+  getExtractJsonToolPrompt
+} from '@fastgpt/global/core/ai/prompt/agent';
 
 type Props = ModuleDispatchProps<{
   [NodeInputKeyEnum.history]?: ChatItemType[];
@@ -154,7 +157,8 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
 const getFunctionCallSchema = async ({
   extractModel,
   histories,
-  params: { content, extractKeys, description }
+  params: { content, extractKeys, description },
+  node: { version }
 }: ActionProps) => {
   const messages: ChatItemType[] = [
     ...histories,
@@ -164,15 +168,10 @@ const getFunctionCallSchema = async ({
     {
       type: ChatItemValueTypeEnum.text,
       text: {
-        content: `我正在执行一个函数,需要你提供一些参数,请以 JSON 字符串格式返回这些参数,要求:
-"""
-${description ? `- ${description}` : ''}
-- 不是每个参数都是必须生成的,如果没有合适的参数值,不要生成该参数,或返回空字符串。
-- 需要结合前面的对话内容,一起生成合适的参数。
-"""
-
-本次输入内容: """${content}"""
-`
+        content: replaceVariable(getExtractJsonToolPrompt(version), {
+          description,
+          content
+        })
       }
     }
   ]
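
The hunk above replaces an inlined Chinese prompt (roughly: "I am executing a function and need you to provide some parameters, returned as a JSON string; not every parameter must be generated — omit it or return an empty string when no suitable value exists; draw on the preceding conversation when generating values. Current input: ...") with a versioned template filled by replaceVariable. A sketch of what the extracted template might look like, assuming the {{...}} placeholder syntax replaceVariable consumes elsewhere in this diff; the shipped text is not shown here and may differ:

// Hypothetical template; the real one lives in @fastgpt/global/core/ai/prompt/agent.
export const EXTRACT_JSON_TOOL_PROMPT = `I am executing a function and need you to provide some parameters, returned as a JSON string. Requirements:
"""
- {{description}}
- Not every parameter must be generated; omit a parameter (or return an empty string) when no suitable value exists.
- Draw on the preceding conversation when generating the values.
"""

Current input: """{{content}}"""
`;
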
@@ -334,7 +333,8 @@ const completions = async ({
   extractModel,
   externalProvider,
   histories,
-  params: { content, extractKeys, description = 'No special requirements' }
+  params: { content, extractKeys, description = 'No special requirements' },
+  node: { version }
 }: ActionProps) => {
   const messages: ChatItemType[] = [
     {
@@ -343,23 +343,26 @@ const completions = async ({
     {
       type: ChatItemValueTypeEnum.text,
       text: {
-        content: replaceVariable(extractModel.customExtractPrompt || Prompt_ExtractJson, {
-          description,
-          json: extractKeys
-            .map((item) => {
-              const valueType = item.valueType || 'string';
-              if (valueType !== 'string' && valueType !== 'number') {
-                item.enum = undefined;
-              }
+        content: replaceVariable(
+          extractModel.customExtractPrompt || getExtractJsonPrompt(version),
+          {
+            description,
+            json: extractKeys
+              .map((item) => {
+                const valueType = item.valueType || 'string';
+                if (valueType !== 'string' && valueType !== 'number') {
+                  item.enum = undefined;
+                }
 
-              return `{"type":${item.valueType || 'string'}, "key":"${item.key}", "description":"${item.desc}" ${
-                item.enum ? `, "enum":"[${item.enum.split('\n')}]"` : ''
-              }}`;
-            })
-            .join('\n'),
-          text: `${histories.map((item) => `${item.obj}:${chatValue2RuntimePrompt(item.value).text}`).join('\n')}
+                return `{"type":${item.valueType || 'string'}, "key":"${item.key}", "description":"${item.desc}" ${
+                  item.enum ? `, "enum":"[${item.enum.split('\n')}]"` : ''
+                }}`;
+              })
+              .join('\n'),
+            text: `${histories.map((item) => `${item.obj}:${chatValue2RuntimePrompt(item.value).text}`).join('\n')}
 Human: ${content}`
-        })
+          }
+        )
       }
     }
   ]

@@ -28,10 +28,10 @@ import { filterToolResponseToPreview } from './utils';
 import { InteractiveNodeResponseType } from '@fastgpt/global/core/workflow/template/system/interactive/type';
 import { getFileContentFromLinks, getHistoryFileLinks } from '../../tools/readFiles';
 import { parseUrlToFileType } from '@fastgpt/global/common/file/tools';
-import { Prompt_DocumentQuote } from '@fastgpt/global/core/ai/prompt/AIChat';
 import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
 import { postTextCensor } from '../../../../../common/api/requestPlusApi';
 import { ModelTypeEnum } from '@fastgpt/global/core/ai/model';
+import { getDocumentQuotePrompt } from '@fastgpt/global/core/ai/prompt/AIChat';
 
 type Response = DispatchNodeResultType<{
   [NodeOutputKeyEnum.answerText]: string;
@@ -40,7 +40,7 @@ type Response = DispatchNodeResultType<{
 
 export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<Response> => {
   const {
-    node: { nodeId, name, isEntry },
+    node: { nodeId, name, isEntry, version },
     runtimeNodes,
     runtimeEdges,
     histories,
@@ -118,7 +118,7 @@ export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<
       toolModel.defaultSystemChatPrompt,
       systemPrompt,
       documentQuoteText
-        ? replaceVariable(Prompt_DocumentQuote, {
+        ? replaceVariable(getDocumentQuotePrompt(version), {
            quote: documentQuoteText
          })
        : ''

@@ -24,10 +24,9 @@ import {
   runtimePrompt2ChatsValue
 } from '@fastgpt/global/core/chat/adapt';
 import {
-  Prompt_DocumentQuote,
-  Prompt_userQuotePromptList,
-  Prompt_QuoteTemplateList,
-  Prompt_systemQuotePromptList
+  getQuoteTemplate,
+  getQuotePrompt,
+  getDocumentQuotePrompt
 } from '@fastgpt/global/core/ai/prompt/AIChat';
 import type { AIChatNodeProps } from '@fastgpt/global/core/workflow/runtime/type.d';
 import { replaceVariable } from '@fastgpt/global/common/string/tools';
@@ -70,7 +69,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
   stream = false,
   externalProvider,
   histories,
-  node: { name },
+  node: { name, version },
   query,
   runningUserInfo,
   workflowStreamResponse,
@@ -115,7 +114,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
     filterDatasetQuote({
       quoteQA,
       model: modelConstantsData,
-      quoteTemplate
+      quoteTemplate: quoteTemplate || getQuoteTemplate(version)
     }),
     getMultiInput({
       histories: chatHistories,
@@ -147,6 +146,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
     datasetQuoteText,
     aiChatQuoteRole,
     datasetQuotePrompt: quotePrompt,
+    version,
     userChatInput,
     systemPrompt,
     userFiles,
@@ -326,10 +326,10 @@ async function filterDatasetQuote({
 }: {
   quoteQA: ChatProps['params']['quoteQA'];
   model: LLMModelItemType;
-  quoteTemplate?: string;
+  quoteTemplate: string;
 }) {
   function getValue(item: SearchDataResponseItemType, index: number) {
-    return replaceVariable(quoteTemplate || Prompt_QuoteTemplateList[0].value, {
+    return replaceVariable(quoteTemplate, {
       id: item.id,
       q: item.q,
       a: item.a,
@@ -425,6 +425,7 @@ async function getChatMessages({
   datasetQuotePrompt = '',
   datasetQuoteText,
   useDatasetQuote,
+  version,
   histories = [],
   systemPrompt,
   userChatInput,
@@ -437,6 +438,7 @@ async function getChatMessages({
   aiChatQuoteRole: AiChatQuoteRoleType; // user: replace user prompt; system: replace system prompt
   datasetQuotePrompt?: string;
   datasetQuoteText: string;
+  version: string;
 
   useDatasetQuote: boolean;
   histories: ChatItemType[];
@@ -451,11 +453,7 @@ async function getChatMessages({
   const quoteRole =
     aiChatQuoteRole === 'user' || datasetQuotePrompt.includes('{{question}}') ? 'user' : 'system';
 
-  const datasetQuotePromptTemplate = datasetQuotePrompt
-    ? datasetQuotePrompt
-    : quoteRole === 'user'
-      ? Prompt_userQuotePromptList[0].value
-      : Prompt_systemQuotePromptList[0].value;
+  const datasetQuotePromptTemplate = datasetQuotePrompt || getQuotePrompt(version, quoteRole);
 
   // Reset user input, add dataset quote to user input
   const replaceInputValue =
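
The five-line ternary collapses into getQuotePrompt(version, quoteRole), which presumably folds the old Prompt_userQuotePromptList / Prompt_systemQuotePromptList defaults into one version-aware lookup. A sketch under that assumption — version keys and prompt texts are placeholders, and the real implementation in @fastgpt/global/core/ai/prompt/AIChat is not part of this diff:

type QuoteRole = 'user' | 'system';

// Hypothetical sketch; keys and prompt texts are placeholders.
const quotePromptMap: Record<string, Record<QuoteRole, string>> = {
  '4.9.0': {
    user: 'Use the quoted material below to answer.\n{{quote}}\nQuestion: {{question}}',
    system: 'Use the quoted material below when answering.\n{{quote}}'
  }
};
const latestQuoteVersion = '4.9.0';

export const getQuotePrompt = (version: string | undefined, role: QuoteRole): string =>
  (quotePromptMap[version ?? latestQuoteVersion] ?? quotePromptMap[latestQuoteVersion])[role];
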
@@ -477,7 +475,7 @@ async function getChatMessages({
         })
       : '',
     documentQuoteText
-      ? replaceVariable(Prompt_DocumentQuote, {
+      ? replaceVariable(getDocumentQuotePrompt(version), {
          quote: documentQuoteText
        })
      : ''