Mirror of https://github.com/labring/FastGPT.git, synced 2025-08-03 21:48:02 +00:00
v4.4.3 (#316)
@@ -613,11 +613,6 @@ const ChatBox = (
flexDirection={'column'}
alignItems={item.obj === 'Human' ? 'flex-end' : 'flex-start'}
py={5}
_hover={{
'& .control': {
display: item.status === 'finish' ? 'flex' : 'none'
}
}}
>
{item.obj === 'Human' && (
<>
@@ -10,11 +10,10 @@ import {
} from '@chakra-ui/react';

interface Props extends ModalContentProps {
showCloseBtn?: boolean;
title?: any;
isCentered?: boolean;
isOpen: boolean;
onClose: () => void;
onClose?: () => void;
}

const MyModal = ({
@@ -22,14 +21,18 @@ const MyModal = ({
onClose,
title,
children,
showCloseBtn = true,
isCentered,
w = 'auto',
maxW = ['90vw', '600px'],
...props
}: Props) => {
return (
<Modal isOpen={isOpen} onClose={onClose} autoFocus={false} isCentered={isCentered}>
<Modal
isOpen={isOpen}
onClose={() => onClose && onClose()}
autoFocus={false}
isCentered={isCentered}
>
<ModalOverlay />
<ModalContent
display={'flex'}
@@ -43,7 +46,7 @@ const MyModal = ({
>
{!!title && <ModalHeader>{title}</ModalHeader>}
<Box overflow={'overlay'} h={'100%'}>
{showCloseBtn && <ModalCloseButton />}
{onClose && <ModalCloseButton />}
{children}
</Box>
</ModalContent>
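Note: with this hunk, MyModal's onClose becomes optional and the close button is rendered only when an onClose handler is actually passed, replacing the old showCloseBtn switch. A minimal usage sketch (hypothetical ConfirmModal wrapper, not part of this commit), assuming only the props shown above:

import React from 'react';
import { ModalBody } from '@chakra-ui/react';
import MyModal from '@/components/MyModal';

// Passing onClose shows the close button; omitting it hides the button
// and leaves the modal non-dismissable by the user.
const ConfirmModal = ({ closable, onClose }: { closable: boolean; onClose: () => void }) => (
  <MyModal isOpen title={'Confirm'} onClose={closable ? onClose : undefined}>
    <ModalBody>Are you sure?</ModalBody>
  </MyModal>
);

export default ConfirmModal;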
@@ -163,19 +163,23 @@ export const ChatModule: FlowModuleTemplateType = {
value: ''
},
{
key: 'limitPrompt',
type: FlowInputItemTypeEnum.textarea,
key: 'quoteTemplate',
type: FlowInputItemTypeEnum.hidden,
label: '引用内容模板',
valueType: FlowValueTypeEnum.string,
value: ''
},
{
key: 'quotePrompt',
type: FlowInputItemTypeEnum.hidden,
label: '引用内容提示词',
valueType: FlowValueTypeEnum.string,
label: '限定词',
max: 500,
description: ChatModelLimitTip,
placeholder: ChatModelLimitTip,
value: ''
},
Input_Template_TFSwitch,
{
key: 'quoteQA',
type: FlowInputItemTypeEnum.target,
type: FlowInputItemTypeEnum.custom,
label: '引用内容',
description: "对象数组格式,结构:\n [{q:'问题',a:'回答'}]",
valueType: FlowValueTypeEnum.kbQuote
@@ -664,19 +668,6 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
value: '',
connected: true
},
{
key: 'limitPrompt',
type: 'textarea',
valueType: 'string',
label: '限定词',
max: 500,
description:
'限定模型对话范围,会被放置在本次提问前,拥有强引导和限定性。可使用变量,例如 {{language}}。引导例子:\n1. 知识库是关于 Laf 的介绍,参考知识库回答问题,与 "Laf" 无关内容,直接回复: "我不知道"。\n2. 你仅回答关于 "xxx" 的问题,其他问题回复: "xxxx"',
placeholder:
'限定模型对话范围,会被放置在本次提问前,拥有强引导和限定性。可使用变量,例如 {{language}}。引导例子:\n1. 知识库是关于 Laf 的介绍,参考知识库回答问题,与 "Laf" 无关内容,直接回复: "我不知道"。\n2. 你仅回答关于 "xxx" 的问题,其他问题回复: "xxxx"',
value: '',
connected: true
},
{
key: 'switch',
type: 'target',
@@ -1013,18 +1004,6 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
value: '',
connected: true
},
{
key: 'limitPrompt',
type: 'textarea',
valueType: 'string',
label: '限定词',
description:
'限定模型对话范围,会被放置在本次提问前,拥有强引导和限定性。可使用变量,例如 {{language}}。引导例子:\n1. 知识库是关于 Laf 的介绍,参考知识库回答问题,与 "Laf" 无关内容,直接回复: "我不知道"。\n2. 你仅回答关于 "xxx" 的问题,其他问题回复: "xxxx"',
placeholder:
'限定模型对话范围,会被放置在本次提问前,拥有强引导和限定性。可使用变量,例如 {{language}}。引导例子:\n1. 知识库是关于 Laf 的介绍,参考知识库回答问题,与 "Laf" 无关内容,直接回复: "我不知道"。\n2. 你仅回答关于 "xxx" 的问题,其他问题回复: "xxxx"',
value: '',
connected: true
},
{
key: 'switch',
type: 'target',
@@ -1319,18 +1298,6 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
value: '',
connected: true
},
{
key: 'limitPrompt',
type: 'textarea',
valueType: 'string',
label: '限定词',
description:
'限定模型对话范围,会被放置在本次提问前,拥有强引导和限定性。可使用变量,例如 {{language}}。引导例子:\n1. 知识库是关于 Laf 的介绍,参考知识库回答问题,与 "Laf" 无关内容,直接回复: "我不知道"。\n2. 你仅回答关于 "xxx" 的问题,其他问题回复: "xxxx"',
placeholder:
'限定模型对话范围,会被放置在本次提问前,拥有强引导和限定性。可使用变量,例如 {{language}}。引导例子:\n1. 知识库是关于 Laf 的介绍,参考知识库回答问题,与 "Laf" 无关内容,直接回复: "我不知道"。\n2. 你仅回答关于 "xxx" 的问题,其他问题回复: "xxxx"',
value: '将我的问题直接翻译成英语{{language}}',
connected: true
},
{
key: 'switch',
type: 'target',
@@ -1703,19 +1670,6 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
value: '知识库是关于 laf 的内容。',
connected: true
},
{
key: 'limitPrompt',
type: 'textarea',
valueType: 'string',
label: '限定词',
description:
'限定模型对话范围,会被放置在本次提问前,拥有强引导和限定性。可使用变量,例如 {{language}}。引导例子:\n1. 知识库是关于 Laf 的介绍,参考知识库回答问题,与 "Laf" 无关内容,直接回复: "我不知道"。\n2. 你仅回答关于 "xxx" 的问题,其他问题回复: "xxxx"',
placeholder:
'限定模型对话范围,会被放置在本次提问前,拥有强引导和限定性。可使用变量,例如 {{language}}。引导例子:\n1. 知识库是关于 Laf 的介绍,参考知识库回答问题,与 "Laf" 无关内容,直接回复: "我不知道"。\n2. 你仅回答关于 "xxx" 的问题,其他问题回复: "xxxx"',
value:
'我的问题都是关于 laf 的。根据知识库回答我的问题,与 laf 无关问题,直接回复:“我不清楚,我仅能回答 laf 相关的问题。”。',
connected: true
},
{
key: 'switch',
type: 'target',
@@ -64,16 +64,7 @@ const PayModal = ({ onClose }: { onClose: () => void }) => {
);

return (
<MyModal
isOpen={true}
onClose={() => {
if (payId) return;
onClose();
}}
title={t('user.Pay')}
isCentered
showCloseBtn={!payId}
>
<MyModal isOpen={true} onClose={payId ? onClose : undefined} title={t('user.Pay')} isCentered>
<ModalBody py={0}>
{!payId && (
<>
@@ -1,446 +0,0 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser } from '@/service/utils/auth';
import { connectToDatabase, App } from '@/service/mongo';
import { FlowModuleTypeEnum, SpecialInputKeyEnum } from '@/constants/flow';
import { TaskResponseKeyEnum } from '@/constants/chat';
import { FlowInputItemType } from '@/types/flow';

const chatModelInput = ({
model,
temperature,
maxToken,
systemPrompt,
limitPrompt,
kbList
}: {
model: string;
temperature: number;
maxToken: number;
systemPrompt: string;
limitPrompt: string;
kbList: { kbId: string }[];
}): FlowInputItemType[] => [
{
key: 'model',
value: model,
type: 'custom',
label: '对话模型',
connected: true
},
{
key: 'temperature',
value: temperature,
label: '温度',
type: 'slider',
connected: true
},
{
key: 'maxToken',
value: maxToken,
type: 'custom',
label: '回复上限',
connected: true
},
{
key: 'systemPrompt',
value: systemPrompt,
type: 'textarea',
label: '系统提示词',
connected: true
},
{
key: 'limitPrompt',
label: '限定词',
type: 'textarea',
value: limitPrompt,
connected: true
},
{
key: 'switch',
type: 'target',
label: '触发器',
connected: kbList.length > 0
},
{
key: 'quoteQA',
type: 'target',
label: '引用内容',
connected: kbList.length > 0
},
{
key: 'history',
type: 'target',
label: '聊天记录',
connected: true
},
{
key: 'userChatInput',
type: 'target',
label: '用户问题',
connected: true
}
];
const chatTemplate = ({
model,
temperature,
maxToken,
systemPrompt,
limitPrompt
}: {
model: string;
temperature: number;
maxToken: number;
systemPrompt: string;
limitPrompt: string;
}) => {
return [
{
flowType: FlowModuleTypeEnum.questionInput,
inputs: [
{
key: 'userChatInput',
connected: true
}
],
outputs: [
{
key: 'userChatInput',
targets: [
{
moduleId: 'chatModule',
key: 'userChatInput'
}
]
}
],
position: {
x: 464.32198615344566,
y: 1602.2698463081606
},
moduleId: 'userChatInput'
},
{
flowType: FlowModuleTypeEnum.historyNode,
inputs: [
{
key: 'maxContext',
value: 10,
connected: true
},
{
key: 'history',
connected: true
}
],
outputs: [
{
key: 'history',
targets: [
{
moduleId: 'chatModule',
key: 'history'
}
]
}
],
position: {
x: 452.5466249541586,
y: 1276.3930310334215
},
moduleId: 'history'
},
{
flowType: FlowModuleTypeEnum.chatNode,
inputs: chatModelInput({
model,
temperature,
maxToken,
systemPrompt,
limitPrompt,
kbList: []
}),
outputs: [
{
key: TaskResponseKeyEnum.answerText,
targets: []
}
],
position: {
x: 981.9682828103937,
y: 890.014595014464
},
moduleId: 'chatModule'
}
];
};
const kbTemplate = ({
model,
temperature,
maxToken,
systemPrompt,
limitPrompt,
kbList = [],
searchSimilarity,
searchLimit,
searchEmptyText
}: {
model: string;
temperature: number;
maxToken: number;
systemPrompt: string;
limitPrompt: string;
kbList: { kbId: string }[];
searchSimilarity: number;
searchLimit: number;
searchEmptyText: string;
}) => {
return [
{
flowType: FlowModuleTypeEnum.questionInput,
inputs: [
{
key: 'userChatInput',
connected: true
}
],
outputs: [
{
key: 'userChatInput',
targets: [
{
moduleId: 'chatModule',
key: 'userChatInput'
},
{
moduleId: 'kbSearch',
key: 'userChatInput'
}
]
}
],
position: {
x: 464.32198615344566,
y: 1602.2698463081606
},
moduleId: 'userChatInput'
},
{
flowType: FlowModuleTypeEnum.historyNode,
inputs: [
{
key: 'maxContext',
value: 10,
connected: true
},
{
key: 'history',
connected: true
}
],
outputs: [
{
key: 'history',
targets: [
{
moduleId: 'chatModule',
key: 'history'
}
]
}
],
position: {
x: 452.5466249541586,
y: 1276.3930310334215
},
moduleId: 'history'
},
{
flowType: FlowModuleTypeEnum.kbSearchNode,
inputs: [
{
key: 'kbList',
value: kbList,
connected: true
},
{
key: 'similarity',
value: searchSimilarity,
connected: true
},
{
key: 'limit',
value: searchLimit,
connected: true
},
{
key: 'switch',
connected: false
},
{
key: 'userChatInput',
connected: true
}
],
outputs: [
{
key: 'isEmpty',
targets: searchEmptyText
? [
{
moduleId: 'emptyText',
key: 'switch'
}
]
: [
{
moduleId: 'chatModule',
key: 'switch'
}
]
},
{
key: 'unEmpty',
targets: [
{
moduleId: 'chatModule',
key: 'switch'
}
]
},
{
key: 'quoteQA',
targets: [
{
moduleId: 'chatModule',
key: 'quoteQA'
}
]
}
],
position: {
x: 956.0838440206068,
y: 887.462827870246
},
moduleId: 'kbSearch'
},
...(searchEmptyText
? [
{
flowType: FlowModuleTypeEnum.answerNode,
inputs: [
{
key: 'switch',
connected: true
},
{
key: SpecialInputKeyEnum.answerText,
value: searchEmptyText,
connected: true
}
],
outputs: [],
position: {
x: 1553.5815811529146,
y: 637.8753731306779
},
moduleId: 'emptyText'
}
]
: []),
{
flowType: FlowModuleTypeEnum.chatNode,
inputs: chatModelInput({ model, temperature, maxToken, systemPrompt, limitPrompt, kbList }),
outputs: [
{
key: TaskResponseKeyEnum.answerText,
targets: []
}
],
position: {
x: 1551.71405495818,
y: 977.4911578918461
},
moduleId: 'chatModule'
}
];
};

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await authUser({ req, authRoot: true });
await connectToDatabase();

const { limit = 1000 } = req.body as { limit: number };
let skip = 0;
const total = await App.countDocuments();
let promise = Promise.resolve();
console.log(total);

for (let i = 0; i < total; i += limit) {
const skipVal = skip;
skip += limit;
promise = promise
.then(() => init(limit, skipVal))
.then(() => {
console.log(skipVal);
});
}

await promise;

jsonRes(res, {});
} catch (error) {
jsonRes(res, {
code: 500,
error
});
}
}

async function init(limit: number, skip: number) {
// 遍历 app
const apps = await App.find(
{
chat: { $ne: null },
modules: { $exists: false }
// userId: '63f9a14228d2a688d8dc9e1b'
},
'_id chat'
).limit(limit);

return Promise.all(
apps.map(async (app) => {
if (!app.chat) return app;
const modules = (() => {
if (app.chat.relatedKbs.length === 0) {
return chatTemplate({
model: app.chat.chatModel,
temperature: app.chat.temperature,
maxToken: app.chat.maxToken,
systemPrompt: app.chat.systemPrompt,
limitPrompt: app.chat.limitPrompt
});
} else {
return kbTemplate({
model: app.chat.chatModel,
temperature: app.chat.temperature,
maxToken: app.chat.maxToken,
systemPrompt: app.chat.systemPrompt,
limitPrompt: app.chat.limitPrompt,
kbList: app.chat.relatedKbs.map((id) => ({ kbId: id })),
searchEmptyText: app.chat.searchEmptyText,
searchLimit: app.chat.searchLimit,
searchSimilarity: app.chat.searchSimilarity
});
}
})();

await App.findByIdAndUpdate(app.id, {
modules
});
return modules;
})
);
}
@@ -31,6 +31,7 @@ import { SystemInputEnum } from '@/constants/app';
import { getSystemTime } from '@/utils/user';
import { authOutLinkChat } from '@/service/support/outLink/auth';
import requestIp from 'request-ip';
import { replaceVariable } from '@/utils/common/tools/text';

export type MessageItemType = ChatCompletionRequestMessage & { dataId?: string };
type FastGptWebChatProps = {
@@ -424,10 +425,7 @@ function loadModules(
}

// variables replace
const replacedVal = item.value.replace(
/{{(.*?)}}/g,
(match, key) => variables[key.trim()] || match
);
const replacedVal = replaceVariable(item.value, variables);

return {
key: item.key,
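Note: the inline /{{(.*?)}}/g replacement in loadModules is swapped for the shared replaceVariable helper introduced at the end of this commit (client/src/utils/common/tools/text.ts). A small sketch of the intended equivalence; the sample values are made up:

import { replaceVariable } from '@/utils/common/tools/text';

const variables: Record<string, string> = { language: 'English' };

// Old inline approach: scan for any {{key}} and keep the match when the key is unknown.
const oldWay = (value: string) =>
  value.replace(/{{(.*?)}}/g, (match, key) => variables[key.trim()] || match);

// New approach: replace each known variable globally; unknown placeholders stay untouched.
const newWay = (value: string) => replaceVariable(value, variables);

// Both turn '请用{{language}}回答' into '请用English回答' for the variables above.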
client/src/pages/app/detail/components/AIChatSettingsModal.tsx (new file, 112 lines)
@@ -0,0 +1,112 @@
import React from 'react';
import MyModal from '@/components/MyModal';
import { useTranslation } from 'react-i18next';
import { EditFormType } from '@/utils/app';
import { useForm } from 'react-hook-form';
import {
Box,
BoxProps,
Button,
Flex,
Link,
ModalBody,
ModalFooter,
Textarea
} from '@chakra-ui/react';
import MyTooltip from '@/components/MyTooltip';
import { QuestionOutlineIcon } from '@chakra-ui/icons';
import { defaultQuotePrompt, defaultQuoteTemplate } from '@/prompts/core/AIChat';
import { feConfigs } from '@/store/static';

const AIChatSettingsModal = ({
onClose,
onSuccess,
defaultData
}: {
onClose: () => void;
onSuccess: (e: EditFormType['chatModel']) => void;
defaultData: EditFormType['chatModel'];
}) => {
const { t } = useTranslation();

const { register, handleSubmit } = useForm({
defaultValues: defaultData
});

const LabelStyles: BoxProps = {
fontWeight: 'bold',
mb: 1,
fontSize: ['sm', 'md']
};

return (
<MyModal
isOpen
title={
<Flex alignItems={'flex-end'}>
{t('app.Quote Prompt Settings')}
{feConfigs?.show_doc && (
<Link
href={'https://doc.fastgpt.run/docs/use-cases/prompt/'}
target={'_blank'}
ml={1}
textDecoration={'underline'}
fontWeight={'normal'}
fontSize={'md'}
>
查看说明
</Link>
)}
</Flex>
}
w={'700px'}
>
<ModalBody>
<Box>
<Box {...LabelStyles}>
引用内容模板
<MyTooltip
label={t('template.Quote Content Tip', { default: defaultQuoteTemplate })}
forceShow
>
<QuestionOutlineIcon display={['none', 'inline']} ml={1} />
</MyTooltip>
</Box>
<Textarea
rows={4}
placeholder={t('template.Quote Content Tip', { default: defaultQuoteTemplate }) || ''}
borderColor={'myGray.100'}
{...register('quoteTemplate')}
/>
</Box>
<Box mt={4}>
<Box {...LabelStyles}>
引用内容提示词
<MyTooltip
label={t('template.Quote Prompt Tip', { default: defaultQuotePrompt })}
forceShow
>
<QuestionOutlineIcon display={['none', 'inline']} ml={1} />
</MyTooltip>
</Box>
<Textarea
rows={6}
placeholder={t('template.Quote Prompt Tip', { default: defaultQuotePrompt }) || ''}
borderColor={'myGray.100'}
{...register('quotePrompt')}
/>
</Box>
</ModalBody>
<ModalFooter>
<Button variant={'base'} onClick={onClose}>
{t('Cancel')}
</Button>
<Button ml={4} onClick={handleSubmit(onSuccess)}>
{t('Confirm')}
</Button>
</ModalFooter>
</MyModal>
);
};

export default AIChatSettingsModal;
@@ -4,21 +4,36 @@ import NodeCard from '../modules/NodeCard';
import { FlowModuleItemType } from '@/types/flow';
import Divider from '../modules/Divider';
import Container from '../modules/Container';
import RenderInput from '../render/RenderInput';
import RenderInput, { Label } from '../render/RenderInput';
import RenderOutput from '../render/RenderOutput';
import { FlowOutputItemTypeEnum } from '@/constants/flow';
import MySelect from '@/components/Select';
import { chatModelList } from '@/store/static';
import MySlider from '@/components/Slider';
import { Box } from '@chakra-ui/react';
import { Box, Button, Flex, useDisclosure } from '@chakra-ui/react';
import { formatPrice } from '@/utils/user';
import MyIcon from '@/components/Icon';
import dynamic from 'next/dynamic';
import { AIChatProps } from '@/types/core/aiChat';

const AIChatSettingsModal = dynamic(() => import('../../../AIChatSettingsModal'));

const NodeChat = ({ data }: NodeProps<FlowModuleItemType>) => {
const { moduleId, inputs, outputs, onChangeNode } = data;
const outputsLen = useMemo(
() => outputs.filter((item) => item.type !== FlowOutputItemTypeEnum.hidden).length,
[outputs]
);

const chatModulesData = useMemo(() => {
const obj: Record<string, any> = {};
inputs.forEach((item) => {
obj[item.key] = item.value;
});
return obj as AIChatProps;
}, [inputs]);

const {
isOpen: isOpenAIChatSetting,
onOpen: onOpenAIChatSetting,
onClose: onCloseAIChatSetting
} = useDisclosure();

return (
<NodeCard minW={'400px'} {...data}>
@@ -109,21 +124,48 @@ const NodeChat = ({ data }: NodeProps<FlowModuleItemType>) => {
/>
</Box>
);
},
quoteQA: (inputItem) => {
return (
<Button
variant={'base'}
leftIcon={<MyIcon name={'settingLight'} w={'14px'} />}
onClick={onOpenAIChatSetting}
>
引用提示词设置
</Button>
);
}
}}
/>
</Container>
{outputsLen > 0 && (
<>
<Divider text="Output" />
<Container>
<RenderOutput
onChangeNode={onChangeNode}
moduleId={moduleId}
flowOutputList={outputs}
/>
</Container>
</>
<Divider text="Output" />
<Container>
<RenderOutput onChangeNode={onChangeNode} moduleId={moduleId} flowOutputList={outputs} />
</Container>

{isOpenAIChatSetting && (
<AIChatSettingsModal
onClose={onCloseAIChatSetting}
onSuccess={(e) => {
for (let key in e) {
const item = inputs.find((input) => input.key === key);
if (!item) continue;
onChangeNode({
moduleId,
type: 'inputs',
key,
value: {
...item,
// @ts-ignore
value: e[key]
}
});
}
onCloseAIChatSetting();
}}
defaultData={chatModulesData}
/>
)}
</NodeCard>
);
@@ -63,6 +63,7 @@ import { useDatasetStore } from '@/store/dataset';
const VariableEditModal = dynamic(() => import('../VariableEditModal'));
const InfoModal = dynamic(() => import('../InfoModal'));
const KBSelectModal = dynamic(() => import('../KBSelectModal'));
const AIChatSettingsModal = dynamic(() => import('../AIChatSettingsModal'));

const Settings = ({ appId }: { appId: string }) => {
const theme = useTheme();
@@ -101,6 +102,11 @@ const Settings = ({ appId }: { appId: string }) => {
name: 'kb.list'
});

const {
isOpen: isOpenAIChatSetting,
onOpen: onOpenAIChatSetting,
onClose: onCloseAIChatSetting
} = useDisclosure();
const {
isOpen: isOpenKbSelect,
onOpen: onOpenKbSelect,
@@ -335,51 +341,61 @@ const Settings = ({ appId }: { appId: string }) => {
+ 新增
</Flex>
</Flex>
<Box mt={2} borderRadius={'lg'} overflow={'hidden'} borderWidth={'1px'} borderBottom="none">
<TableContainer>
<Table bg={'white'}>
<Thead>
<Tr>
<Th>变量名</Th>
<Th>变量 key</Th>
<Th>必填</Th>
<Th></Th>
</Tr>
</Thead>
<Tbody>
{variables.map((item, index) => (
<Tr key={item.id}>
<Td>{item.label} </Td>
<Td>{item.key}</Td>
<Td>{item.required ? '✔' : ''}</Td>
<Td>
<MyIcon
mr={3}
name={'settingLight'}
w={'16px'}
cursor={'pointer'}
onClick={() => setEditVariable(item)}
/>
<MyIcon
name={'delete'}
w={'16px'}
cursor={'pointer'}
onClick={() => removeVariable(index)}
/>
</Td>
{variables.length > 0 && (
<Box
mt={2}
borderRadius={'lg'}
overflow={'hidden'}
borderWidth={'1px'}
borderBottom="none"
>
<TableContainer>
<Table bg={'white'}>
<Thead>
<Tr>
<Th>变量名</Th>
<Th>变量 key</Th>
<Th>必填</Th>
<Th></Th>
</Tr>
))}
</Tbody>
</Table>
</TableContainer>
</Box>
</Thead>
<Tbody>
{variables.map((item, index) => (
<Tr key={item.id}>
<Td>{item.label} </Td>
<Td>{item.key}</Td>
<Td>{item.required ? '✔' : ''}</Td>
<Td>
<MyIcon
mr={3}
name={'settingLight'}
w={'16px'}
cursor={'pointer'}
onClick={() => setEditVariable(item)}
/>
<MyIcon
name={'delete'}
w={'16px'}
cursor={'pointer'}
onClick={() => removeVariable(index)}
/>
</Td>
</Tr>
))}
</Tbody>
</Table>
</TableContainer>
</Box>
)}
</Box>

{/* model */}
<Box mt={5} {...BoxStyles}>
<Flex alignItems={'center'}>
<Avatar src={'/imgs/module/AI.png'} w={'18px'} />
<Box ml={2}>AI 配置</Box>
<Box ml={2} flex={1}>
AI 配置
</Box>
</Flex>

<Flex alignItems={'center'} mt={5}>
@@ -452,20 +468,6 @@ const Settings = ({ appId }: { appId: string }) => {
{...register('chatModel.systemPrompt')}
></Textarea>
</Flex>
<Flex mt={5} alignItems={'flex-start'}>
<Box {...LabelStyles}>
限定词
<MyTooltip label={ChatModelLimitTip} forceShow>
<QuestionOutlineIcon display={['none', 'inline']} ml={1} />
</MyTooltip>
</Box>
<Textarea
rows={5}
placeholder={ChatModelLimitTip}
borderColor={'myGray.100'}
{...register('chatModel.limitPrompt')}
></Textarea>
</Flex>
</Box>

{/* kb */}
@@ -483,6 +485,10 @@ const Settings = ({ appId }: { appId: string }) => {
<MyIcon name={'edit'} w={'14px'} mr={1} />
参数
</Flex>
<Flex {...BoxBtnStyles} onClick={onOpenAIChatSetting}>
<MyIcon mr={1} name={'settingLight'} w={'14px'} />
提示词
</Flex>
</Flex>
<Flex mt={1} color={'myGray.600'} fontSize={['sm', 'md']}>
相似度: {getValues('kb.searchSimilarity')}, 单次搜索数量: {getValues('kb.searchLimit')},
@@ -548,6 +554,16 @@ const Settings = ({ appId }: { appId: string }) => {
}}
/>
)}
{isOpenAIChatSetting && (
<AIChatSettingsModal
onClose={onCloseAIChatSetting}
onSuccess={(e) => {
setValue('chatModel', e);
onCloseAIChatSetting();
}}
defaultData={getValues('chatModel')}
/>
)}
{isOpenKbSelect && (
<KBSelectModal
activeKbs={selectedKbList.map((item) => ({
@@ -55,7 +55,6 @@ const InfoModal = ({
name: data.name,
avatar: data.avatar,
intro: data.intro,
chat: data.chat,
share: data.share
});
},
@@ -282,8 +282,6 @@ export function EditLinkModal({
return (
<MyModal
isOpen={true}
showCloseBtn={false}
onClose={() => {}}
title={isEdit ? titleMap.current.edit[type] : titleMap.current.create[type]}
>
<ModalBody>
@@ -168,7 +168,7 @@ const InputDataModal = ({
</Box>
<Box flex={1} h={['50%', '100%']}>
<Flex>
<Box h={'30px'}>{'预期答案'}</Box>
<Box h={'30px'}>{'补充内容'}</Box>
<MyTooltip
label={'匹配的知识点被命中后,这部分内容会随匹配知识点一起注入模型,引导模型回答'}
>
@@ -177,9 +177,8 @@ const InputDataModal = ({
</Flex>
<Textarea
placeholder={
'预期答案。这部分内容不会被搜索,但会作为"匹配的知识点"的内容补充,通常是问题的答案。总和最多 3000 字。'
'这部分内容不会被搜索,但会作为"匹配的知识点"的内容补充,通常是问题的答案。'
}
maxLength={3000}
resize={'none'}
h={'calc(100% - 30px)'}
{...register('a')}
client/src/prompts/core/AIChat.ts (new file, 10 lines)
@@ -0,0 +1,10 @@
export const defaultQuoteTemplate = `{instruction:"{{q}}",output:"{{a}}"}`;
export const defaultQuotePrompt = `你的背景知识:
"""
{{quote}}
"""
对话要求:
1. 背景知识是最新的,其中 instruction 是相关介绍,output 是预期回答或补充。
2. 使用背景知识回答问题。
3. 背景知识无法满足问题时,你需严谨的回答问题。
我的问题是:"{{question}}"`;
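Note: a worked example of how these defaults are meant to expand, using the replaceVariable helper added later in this commit; the QA pair is made-up sample data:

import { replaceVariable } from '@/utils/common/tools/text';
import { defaultQuoteTemplate, defaultQuotePrompt } from '@/prompts/core/AIChat';

// One retrieved QA pair (sample data).
const item = { q: 'laf 是什么?', a: 'laf 是一个云开发平台。' };

// defaultQuoteTemplate renders a single pair:
// {instruction:"laf 是什么?",output:"laf 是一个云开发平台。"}
const quoteText = replaceVariable(defaultQuoteTemplate, item);

// defaultQuotePrompt wraps the joined quotes plus the user question into the final Human message.
const question = replaceVariable(defaultQuotePrompt, {
  quote: quoteText,
  question: 'laf 支持哪些语言?'
});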
@@ -17,12 +17,12 @@ import { ChatCompletionRequestMessageRoleEnum } from 'openai';
import { AppModuleItemType } from '@/types/app';
import { countMessagesTokens, sliceMessagesTB } from '@/utils/common/tiktoken';
import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
import { defaultQuotePrompt, defaultQuoteTemplate } from '@/prompts/core/AIChat';
import type { AIChatProps } from '@/types/core/aiChat';
import { replaceVariable } from '@/utils/common/tools/text';

export type ChatProps = {
export type ChatProps = AIChatProps & {
res: NextApiResponse;
model: string;
temperature?: number;
maxToken?: number;
history?: ChatItemType[];
userChatInput: string;
stream?: boolean;
@@ -52,7 +52,9 @@ export const dispatchChatCompletion = async (props: Record<string, any>): Promis
quoteQA = [],
userChatInput,
systemPrompt = '',
limitPrompt = '',
limitPrompt,
quoteTemplate,
quotePrompt,
userOpenaiAccount,
outputs
} = props as ChatProps;
@@ -67,16 +69,16 @@ export const dispatchChatCompletion = async (props: Record<string, any>): Promis
return Promise.reject('The chat model is undefined, you need to select a chat model.');
}

const { filterQuoteQA, quotePrompt, hasQuoteOutput } = filterQuote({
const { filterQuoteQA, quoteText, hasQuoteOutput } = filterQuote({
quoteQA,
model: modelConstantsData
model: modelConstantsData,
quoteTemplate
});

if (modelConstantsData.censor) {
await textCensor({
text: `${systemPrompt}
${quotePrompt}
${limitPrompt}
${quoteText}
${userChatInput}
`
});
@@ -85,6 +87,7 @@ export const dispatchChatCompletion = async (props: Record<string, any>): Promis
const { messages, filterMessages } = getChatMessages({
model: modelConstantsData,
history,
quoteText,
quotePrompt,
userChatInput,
systemPrompt,
@@ -189,39 +192,40 @@ export const dispatchChatCompletion = async (props: Record<string, any>): Promis

function filterQuote({
quoteQA = [],
model
model,
quoteTemplate
}: {
quoteQA: ChatProps['quoteQA'];
model: ChatModelItemType;
quoteTemplate?: string;
}) {
const sliceResult = sliceMessagesTB({
maxTokens: model.quoteMaxToken,
messages: quoteQA.map((item) => ({
obj: ChatRoleEnum.System,
value: item.a ? `${item.q}\n${item.a}` : item.q
value: replaceVariable(quoteTemplate || defaultQuoteTemplate, item)
}))
});

// slice filterSearch
const filterQuoteQA = quoteQA.slice(0, sliceResult.length);

const quotePrompt =
const quoteText =
filterQuoteQA.length > 0
? `"""${filterQuoteQA
.map((item) =>
item.a ? `{instruction:"${item.q}",output:"${item.a}"}` : `{instruction:"${item.q}"}`
)
.join('\n')}"""`
? `${filterQuoteQA
.map((item) => replaceVariable(quoteTemplate || defaultQuoteTemplate, item))
.join('\n')}`
: '';

return {
filterQuoteQA,
quotePrompt,
quoteText,
hasQuoteOutput: !!filterQuoteQA.find((item) => item.a)
};
}
function getChatMessages({
quotePrompt,
quoteText,
history = [],
systemPrompt,
limitPrompt,
@@ -229,32 +233,28 @@ function getChatMessages({
model,
hasQuoteOutput
}: {
quotePrompt: string;
quotePrompt?: string;
quoteText: string;
history: ChatProps['history'];
systemPrompt: string;
limitPrompt: string;
limitPrompt?: string;
userChatInput: string;
model: ChatModelItemType;
hasQuoteOutput: boolean;
}) {
const { quoteGuidePrompt } = getDefaultPrompt({ hasQuoteOutput });

const systemText = `${quotePrompt ? `${quoteGuidePrompt}\n\n` : ''}${systemPrompt}`;
const question = hasQuoteOutput
? replaceVariable(quotePrompt || defaultQuotePrompt, {
quote: quoteText,
question: userChatInput
})
: userChatInput;

const messages: ChatItemType[] = [
...(systemText
...(systemPrompt
? [
{
obj: ChatRoleEnum.System,
value: systemText
}
]
: []),
...(quotePrompt
? [
{
obj: ChatRoleEnum.System,
value: quotePrompt
value: systemPrompt
}
]
: []),
@@ -269,7 +269,7 @@ function getChatMessages({
: []),
{
obj: ChatRoleEnum.Human,
value: userChatInput
value: question
}
];

@@ -375,11 +375,3 @@ async function streamResponse({
answer
};
}

function getDefaultPrompt({ hasQuoteOutput }: { hasQuoteOutput?: boolean }) {
return {
quoteGuidePrompt: `三引号引用的内容是我提供给你的知识库,它们拥有最高优先级。instruction 是相关介绍${
hasQuoteOutput ? ',output 是预期回答或补充。' : '。'
}`
};
}
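Note: taken together, these hunks move the retrieved quotes out of their own System message: filterQuote renders each pair with quoteTemplate into quoteText, getChatMessages folds quoteText plus the user input into quotePrompt as the Human message, and systemPrompt keeps its own System slot. A condensed sketch of the resulting message shape (simplified; limitPrompt handling and the hasQuoteOutput gate are omitted here):

type SketchMessage = { obj: 'System' | 'Human'; value: string };

const buildMessages = (
  systemPrompt: string,
  question: string,
  history: SketchMessage[]
): SketchMessage[] => [
  ...(systemPrompt ? [{ obj: 'System' as const, value: systemPrompt }] : []),
  ...history,
  { obj: 'Human' as const, value: question }
];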
client/src/types/core/aiChat.d.ts (new file, vendored, 10 lines)
@@ -0,0 +1,10 @@
export type AIChatProps = {
model: string;
systemPrompt: string;
temperature: number;
maxToken: number;
quoteTemplate?: string;
quotePrompt?: string;
frequency: number;
presence: number;
};
client/src/types/mongoSchema.d.ts (vendored, 11 lines)
@@ -50,17 +50,6 @@ export interface AppSchema {
collection: number;
};
modules: AppModuleItemType[];
chat?: {
relatedKbs: string[];
searchSimilarity: number;
searchLimit: number;
searchEmptyText: string;
systemPrompt: string;
limitPrompt: string;
temperature: number;
maxToken: number;
chatModel: ChatModelType; // 聊天时用的模型,训练后就是训练的模型
};
}

export interface CollectionSchema {
@@ -1,5 +1,5 @@
import type { AppModuleItemType, VariableItemType } from '@/types/app';
import { chatModelList, vectorModelList } from '@/store/static';
import { chatModelList } from '@/store/static';
import {
FlowInputItemTypeEnum,
FlowModuleTypeEnum,
@@ -7,20 +7,12 @@ import {
SpecialInputKeyEnum
} from '@/constants/flow';
import { SystemInputEnum } from '@/constants/app';
import { TaskResponseKeyEnum } from '@/constants/chat';
import type { SelectedKbType } from '@/types/plugin';
import { FlowInputItemType } from '@/types/flow';
import type { AIChatProps } from '@/types/core/aiChat';

export type EditFormType = {
chatModel: {
model: string;
systemPrompt: string;
limitPrompt: string;
temperature: number;
maxToken: number;
frequency: number;
presence: number;
};
chatModel: AIChatProps;
kb: {
list: SelectedKbType;
searchSimilarity: number;
@@ -41,8 +33,9 @@ export const getDefaultAppForm = (): EditFormType => {
chatModel: {
model: defaultChatModel.model,
systemPrompt: '',
limitPrompt: '',
temperature: 0,
quotePrompt: '',
quoteTemplate: '',
maxToken: defaultChatModel.contextMaxToken / 2,
frequency: 0.5,
presence: -0.5
@@ -109,9 +102,14 @@ export const appModules2Form = (modules: AppModuleItemType[]) => {
key: 'systemPrompt'
});
updateVal({
formKey: 'chatModel.limitPrompt',
formKey: 'chatModel.quoteTemplate',
inputs: module.inputs,
key: 'limitPrompt'
key: 'quoteTemplate'
});
updateVal({
formKey: 'chatModel.quotePrompt',
inputs: module.inputs,
key: 'quotePrompt'
});
} else if (module.flowType === FlowModuleTypeEnum.kbSearchNode) {
updateVal({
@@ -178,16 +176,23 @@ const chatModelInput = (formData: EditFormType): FlowInputItemType[] => [
},
{
key: 'systemPrompt',
value: formData.chatModel.systemPrompt,
value: formData.chatModel.systemPrompt || '',
type: 'textarea',
label: '系统提示词',
connected: true
},
{
key: 'limitPrompt',
type: 'textarea',
value: formData.chatModel.limitPrompt,
label: '限定词',
key: 'quoteTemplate',
value: formData.chatModel.quoteTemplate || '',
type: 'hidden',
label: '引用内容模板',
connected: true
},
{
key: 'quotePrompt',
value: formData.chatModel.quotePrompt || '',
type: 'hidden',
label: '引用内容提示词',
connected: true
},
{
@@ -81,7 +81,7 @@ export function sliceMessagesTB({
const tokens = countPromptTokens(item.content, item.role);
reduceTokens -= tokens;

if (tokens > 0) {
if (reduceTokens > 0) {
result.push(messages[i]);
} else {
break;
client/src/utils/common/tools/text.ts (new file, 12 lines)
@@ -0,0 +1,12 @@
/*
replace {{variable}} to value
*/
export function replaceVariable(text: string, obj: Record<string, string>) {
for (const key in obj) {
const val = obj[key];
if (typeof val !== 'string') continue;

text = text.replace(new RegExp(`{{(${key})}}`, 'g'), val);
}
return text || '';
}
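Note: a quick usage example for the helper above; values are illustrative:

import { replaceVariable } from '@/utils/common/tools/text';

// Keys present in the map are replaced globally; placeholders without a matching
// key (here {{name}}) are left untouched, and non-string values are skipped.
const out = replaceVariable('Answer in {{language}}, {{name}}.', { language: 'English' });
// out === 'Answer in English, {{name}}.'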