Add modal to show completion response data (#324)

This commit is contained in:
Archer
2023-09-19 20:31:45 +08:00
committed by GitHub
parent ce7da2db66
commit ac4355d2e1
24 changed files with 486 additions and 156 deletions

View File

@@ -78,6 +78,28 @@
"share": "Share",
"test": "Test Chat "
},
"response": {
"module cq": "Question classification list",
"module cq result": "Classification Result",
"module extract description": "Extract Description",
"module extract result": "Extract Result",
"module historyPreview": "Messages",
"module http body": "Body",
"module http result": "Response",
"module http url": "Request Url",
"module limit": "Count Limit",
"module maxToken": "MaxTokens",
"module model": "Model",
"module name": "Name",
"module price": "Price",
"module question": "Question",
"module quoteList": "Quotes",
"module runningTime": "Time",
"module similarity": "Similarity",
"module temperature": "Temperature",
"module time": "Running Time",
"module tokens": "Tokens"
},
"retry": "Retry"
},
"common": {

View File

@@ -78,6 +78,28 @@
"share": "外部链接调用",
"test": "测试"
},
"response": {
"module cq": "问题分类列表",
"module cq result": "分类结果",
"module extract description": "提取要求描述",
"module extract result": "提取结果",
"module historyPreview": "完整记录",
"module http body": "请求体",
"module http result": "响应体",
"module http url": "请求地址",
"module limit": "单次搜索上限",
"module maxToken": "最大 Tokens",
"module model": "模型",
"module name": "模型名",
"module price": "计费",
"module question": "问题",
"module quoteList": "引用内容",
"module runningTime": "运行时长",
"module similarity": "相似度",
"module temperature": "温度",
"module time": "运行时长",
"module tokens": "Tokens"
},
"retry": "重新生成"
},
"common": {
@@ -228,7 +250,7 @@
"Help Document": "帮助文档"
},
"template": {
"Quote Content Tip": "该配置只有传入引用内容(知识库搜索)时生效。\n可以自定义引用内容的结构以更好的适配不同场景。可以使用 {{q}}, {{a}}, {{source}} 来作为 “检索内容”、“预期内容”和“来源”,他们都是可选的,下面是默认值:\n{{default}}",
"Quote Content Tip": "该配置只有传入引用内容(知识库搜索)时生效。\n可以自定义引用内容的结构以更好的适配不同场景。可以使用一些变量来进行模板配置:\n{{q}} - 检索内容, {{a}} - 预期内容, {{source}} - 来源,{{file_id}} - 来源文件名,{{index}} - 第n个引用,他们都是可选的,下面是默认值:\n{{default}}",
"Quote Prompt Tip": "该配置只有传入引用内容(知识库搜索)时生效。\n可以用 {{quote}} 来插入引用内容,使用 {{question}} 来插入问题。下面是默认值:\n{{default}}"
},
"user": {

View File

@@ -1,12 +1,15 @@
import { GET, POST } from './request';
import { POST } from './request';
export const textCensor = (data: { text: string }) =>
POST<{ code?: number; message: string }>('/plugins/censor/text_baidu', data)
.then((res) => {
if (res?.code === 5000) {
return Promise.reject(res.message);
return Promise.reject(res);
}
})
.catch((err) => {
if (err?.code === 5000) {
return Promise.reject(err.message);
}
return Promise.resolve('');
});

View File

@@ -32,16 +32,18 @@ const ResponseTags = ({
} = useDisclosure();
const {
chatAccount,
quoteList = [],
historyPreview = [],
runningTime = 0
} = useMemo(() => {
const chatData = responseData.find((item) => item.moduleType === FlowModuleTypeEnum.chatNode);
if (!chatData) return {};
return {
quoteList: chatData.quoteList,
historyPreview: chatData.historyPreview,
runningTime: responseData.reduce((sum, item) => sum + (item.runningTime || 0), 0)
chatAccount: responseData.filter((item) => item.moduleType === FlowModuleTypeEnum.chatNode)
.length,
quoteList: chatData?.quoteList,
historyPreview: chatData?.historyPreview,
runningTime: responseData.reduce((sum, item) => sum + (item.runningTime || 0), 0).toFixed(2)
};
}, [responseData]);
@@ -54,6 +56,8 @@ const ResponseTags = ({
return responseData.length === 0 ? null : (
<Flex alignItems={'center'} mt={2} flexWrap={'wrap'}>
{chatAccount === 1 ? (
<>
{quoteList.length > 0 && (
<MyTooltip label="查看引用">
<Tag
@@ -78,12 +82,21 @@ const ResponseTags = ({
</Tag>
</MyTooltip>
)}
</>
) : (
<Tag colorSchema="blue" {...TagStyles}>
AI
</Tag>
)}
{isPc && runningTime > 0 && (
<MyTooltip label={'模块运行时间和'}>
<Tag colorSchema="purple" cursor={'default'} {...TagStyles}>
{runningTime}s
</Tag>
</MyTooltip>
)}
<MyTooltip label={'点击查看完整响应'}>
<MyTooltip label={'点击查看完整响应'}>
<Tag colorSchema="gray" cursor={'pointer'} {...TagStyles} onClick={onOpenWholeModal}>
{t('chat.Complete Response')}
</Tag>

View File

@@ -1,11 +1,36 @@
import React, { useMemo } from 'react';
import { Box, ModalBody, useTheme, Flex } from '@chakra-ui/react';
import React, { useMemo, useState } from 'react';
import { Box, useTheme, Flex, Image } from '@chakra-ui/react';
import type { ChatHistoryItemResType } from '@/types/chat';
import { useTranslation } from 'react-i18next';
import { ModuleTemplatesFlat } from '@/constants/flow/ModuleTemplate';
import Tabs from '../Tabs';
import MyModal from '../MyModal';
import MyTooltip from '../MyTooltip';
import { QuestionOutlineIcon } from '@chakra-ui/icons';
import { formatPrice } from '@/utils/user';
/**
 * Labelled value row for the "complete response" modal.
 * Renders nothing when the value is absent, an empty string, or the literal
 * string 'undefined' (callers interpolate optional fields into template
 * literals, which can produce the text "undefined" — presumably intentional,
 * TODO confirm with callers).
 */
function Row({ label, value }: { label: string; value?: string | number | React.ReactNode }) {
  const theme = useTheme();

  // Guard clause: hide empty / missing values entirely.
  if (value === undefined || value === '' || value === 'undefined') {
    return null;
  }

  return (
    <Box mb={2}>
      <Box fontSize={['sm', 'md']} mb={1} flex={'0 0 90px'}>
        {label}:
      </Box>
      <Box
        px={3}
        py={1}
        borderRadius={'lg'}
        border={theme.borders.base}
        position={'relative'}
        whiteSpace={'pre-wrap'}
        fontSize={'sm'}
      >
        {value}
      </Box>
    </Box>
  );
}
const ResponseModal = ({
response,
@@ -14,56 +39,159 @@ const ResponseModal = ({
response: ChatHistoryItemResType[];
onClose: () => void;
}) => {
const { t } = useTranslation();
const theme = useTheme();
const { t } = useTranslation();
const formatResponse = useMemo(
const list = useMemo(
() =>
response.map((item) => {
const copy = { ...item };
delete copy.historyPreview;
delete copy.quoteList;
return copy;
}),
response.map((item, i) => ({
label: (
<Flex alignItems={'center'} justifyContent={'center'} px={2}>
<Image
mr={2}
src={
ModuleTemplatesFlat.find((template) => item.moduleType === template.flowType)?.logo
}
alt={''}
w={['14px', '16px']}
/>
{item.moduleName}
</Flex>
),
id: `${i}`
})),
[response]
);
const [currentTab, setCurrentTab] = useState(`0`);
const activeModule = useMemo(() => response[Number(currentTab)], [currentTab, response]);
return (
<MyModal
isCentered
isOpen={true}
onClose={onClose}
h={['90vh', '80vh']}
minW={['90vw', '600px']}
w={['90vw', '500px']}
title={
<Flex alignItems={'center'}>
{t('chat.Complete Response')}
<MyTooltip
label={
'moduleName: 模型名\nprice: 价格倍率100000\nmodel?: 模型名\ntokens?: token 消耗\n\nanswer?: 回答内容\nquestion?: 问题\ntemperature?: 温度\nmaxToken?: 最大 tokens\n\nsimilarity?: 相似度\nlimit?: 单次搜索结果\n\ncqList?: 问题分类列表\ncqResult?: 分类结果\n\nextractDescription?: 内容提取描述\nextractResult?: 提取结果'
}
>
<MyTooltip label={'从左往右,为各个模块的响应顺序'}>
<QuestionOutlineIcon ml={2} />
</MyTooltip>
</Flex>
}
isCentered
>
<ModalBody>
{formatResponse.map((item, i) => (
<Box
key={i}
p={2}
pt={[0, 2]}
borderRadius={'lg'}
border={theme.borders.base}
_notLast={{ mb: 2 }}
position={'relative'}
whiteSpace={'pre-wrap'}
>
{JSON.stringify(item, null, 2)}
<Flex h={'100%'} flexDirection={'column'}>
<Box>
<Tabs list={list} activeId={currentTab} onChange={setCurrentTab} />
</Box>
<Box py={2} px={4} flex={'1 0 0'} overflow={'auto'}>
<Row label={t('chat.response.module name')} value={activeModule?.moduleName} />
<Row
label={t('chat.response.module price')}
value={`${formatPrice(activeModule?.price)}`}
/>
<Row
label={t('chat.response.module time')}
value={`${activeModule?.runningTime || 0}s`}
/>
<Row label={t('chat.response.module tokens')} value={`${activeModule?.tokens}`} />
<Row label={t('chat.response.module model')} value={activeModule?.model} />
{/* ai chat */}
<Row label={t('chat.response.module question')} value={activeModule?.question} />
<Row label={t('chat.response.module temperature')} value={activeModule?.temperature} />
<Row label={t('chat.response.module maxToken')} value={activeModule?.maxToken} />
<Row
label={t('chat.response.module quoteList')}
value={(() => {
try {
JSON.stringify(activeModule.quoteList, null, 2);
} catch (error) {
return '';
}
})()}
/>
<Row
label={t('chat.response.module historyPreview')}
value={(() => {
if (!activeModule?.historyPreview) return '';
return (
<>
{activeModule.historyPreview.map((item, i) => (
<Box key={i} _notLast={{ mb: 3, borderBottom: theme.borders.base }} pb={3}>
<Box fontWeight={'bold'}>{item.obj}</Box>
<Box>{item.value}</Box>
</Box>
))}
</ModalBody>
</>
);
})()}
/>
{/* dataset search */}
<Row label={t('chat.response.module similarity')} value={activeModule?.similarity} />
<Row label={t('chat.response.module limit')} value={activeModule?.limit} />
{/* classify question */}
<Row
label={t('chat.response.module cq')}
value={(() => {
if (!activeModule?.cqList) return '';
return (
<Box as={'ol'} px={3}>
{activeModule.cqList.map((item) => (
<Box key={item.key} as={'li'}>
{item.value}
</Box>
))}
</Box>
);
})()}
/>
<Row label={t('chat.response.module cq result')} value={activeModule?.cqResult} />
{/* extract */}
<Row
label={t('chat.response.module extract description')}
value={activeModule?.extractDescription}
/>
<Row
label={t('chat.response.module extract result')}
value={(() => {
try {
return JSON.stringify(activeModule?.extractResult, null, 2);
} catch (error) {
return '';
}
})()}
/>
{/* http */}
<Row
label={t('chat.response.module http body')}
value={(() => {
try {
return JSON.stringify(activeModule?.body, null, 2);
} catch (error) {
return '';
}
})()}
/>
<Row
label={t('chat.response.module http result')}
value={(() => {
try {
return JSON.stringify(activeModule?.httpResult, null, 2);
} catch (error) {
return '';
}
})()}
/>
</Box>
</Flex>
</MyModal>
);
};

View File

@@ -0,0 +1,64 @@
import React, { useMemo } from 'react';
import { Box, useTheme } from '@chakra-ui/react';
import { getFileAndOpen } from '@/utils/common/file';
import { useToast } from '@/hooks/useToast';
import { getErrText } from '@/utils/tools';
// One entry of the JSON array embedded in a ```quote fenced code block.
type QuoteItemType = {
  // File id; when present, the rendered quote is clickable and opens the file.
  file_id?: string;
  // Display name of the quoted source file.
  filename: string;
};
/**
 * Renders a ```quote fenced code block emitted by the chat.
 * The block body is expected to be a JSON array of { file_id?, filename }
 * items; entries that carry a file_id are rendered as clickable links that
 * open the source file, with a warning toast on failure.
 */
const QuoteBlock = ({ code }: { code: string }) => {
  const theme = useTheme();
  const { toast } = useToast();

  // Parse defensively: malformed (e.g. still-streaming) JSON yields an
  // empty list instead of throwing.
  const quoteList = useMemo(() => {
    try {
      return JSON.parse(code) as QuoteItemType[];
    } catch (error) {
      return [];
    }
  }, [code]);

  // Open the quoted file; items without a file_id are inert.
  const openQuoteFile = async (fileId?: string) => {
    if (!fileId) return;
    try {
      await getFileAndOpen(fileId);
    } catch (error) {
      toast({
        status: 'warning',
        title: getErrText(error, '打开文件失败')
      });
    }
  };

  return (
    <Box mt={3} pt={2} borderTop={theme.borders.base}>
      {quoteList.length === 0 ? (
        <Box></Box>
      ) : (
        <>
          <Box>:</Box>
          <Box as={'ol'}>
            {quoteList.map((item, i) => {
              // Link styling only for entries that reference a file.
              const linkStyles = item.file_id
                ? {
                    textDecoration: 'underline',
                    color: 'myBlue.800',
                    cursor: 'pointer'
                  }
                : {};
              return (
                <Box
                  key={i}
                  as={'li'}
                  {...linkStyles}
                  onClick={() => openQuoteFile(item.file_id)}
                >
                  {item.filename}
                </Box>
              );
            })}
          </Box>
        </>
      )}
    </Box>
  );
};

export default QuoteBlock;

View File

@@ -15,21 +15,32 @@ const MermaidCodeBlock = dynamic(() => import('./img/MermaidCodeBlock'));
const MdImage = dynamic(() => import('./img/Image'));
const ChatGuide = dynamic(() => import('./chat/Guide'));
const EChartsCodeBlock = dynamic(() => import('./img/EChartsCodeBlock'));
const QuoteBlock = dynamic(() => import('./chat/Quote'));
export enum CodeClassName {
guide = 'guide',
mermaid = 'mermaid',
echarts = 'echarts',
quote = 'quote'
}
function Code({ inline, className, children }: any) {
const match = /language-(\w+)/.exec(className || '');
const codeType = match?.[1];
if (codeType === 'mermaid') {
if (codeType === CodeClassName.mermaid) {
return <MermaidCodeBlock code={String(children)} />;
}
if (codeType === 'guide') {
if (codeType === CodeClassName.guide) {
return <ChatGuide text={String(children)} />;
}
if (codeType === 'echarts') {
if (codeType === CodeClassName.echarts) {
return <EChartsCodeBlock code={String(children)} />;
}
if (codeType === CodeClassName.quote) {
return <QuoteBlock code={String(children)} />;
}
return (
<CodeLight className={className} inline={inline} match={match}>
{children}

View File

@@ -5,7 +5,7 @@ import { useTranslation } from 'react-i18next';
// @ts-ignore
interface Props extends GridProps {
list: { id: string; label: string }[];
list: { id: string; label: string | React.ReactNode }[];
activeId: string;
size?: 'sm' | 'md' | 'lg';
onChange: (id: string) => void;
@@ -23,13 +23,13 @@ const Tabs = ({ list, size = 'md', activeId, onChange, ...props }: Props) => {
};
case 'md':
return {
fontSize: 'md',
fontSize: ['sm', 'md'],
outP: '4px',
inlineP: 1
};
case 'lg':
return {
fontSize: 'lg',
fontSize: ['md', 'lg'],
outP: '5px',
inlineP: 2
};
@@ -68,7 +68,7 @@ const Tabs = ({ list, size = 'md', activeId, onChange, ...props }: Props) => {
onChange(item.id);
}}
>
{t(item.label) || item.label}
{typeof item.label === 'string' ? t(item.label) : item.label}
</Box>
))}
</Grid>

View File

@@ -32,6 +32,7 @@ import { getSystemTime } from '@/utils/user';
import { authOutLinkChat } from '@/service/support/outLink/auth';
import requestIp from 'request-ip';
import { replaceVariable } from '@/utils/common/tools/text';
import { ModuleDispatchProps } from '@/types/core/modules';
export type MessageItemType = ChatCompletionRequestMessage & { dataId?: string };
type FastGptWebChatProps = {
@@ -365,13 +366,15 @@ export async function dispatchModules({
module.inputs.forEach((item: any) => {
params[item.key] = item.value;
});
const props: Record<string, any> = {
const props: ModuleDispatchProps<Record<string, any>> = {
res,
stream,
detail,
variables,
moduleName: module.name,
outputs: module.outputs,
userOpenaiAccount: user?.openaiAccount,
...params
inputs: params
};
const dispatchRes = await (async () => {

View File

@@ -4,6 +4,7 @@ import { connectToDatabase } from '@/service/mongo';
import { authUser } from '@/service/utils/auth';
import jwt from 'jsonwebtoken';
import { ERROR_ENUM } from '@/service/errorCode';
import { GridFSStorage } from '@/service/lib/gridfs';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
@@ -17,6 +18,10 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
const { userId } = await authUser({ req });
// auth file
const gridFs = new GridFSStorage('dataset', userId);
await gridFs.findAndAuthFile(fileId);
const token = await createFileToken({
userId,
fileId

View File

@@ -2,7 +2,6 @@ import React, { useState, useCallback } from 'react';
import { Box, Flex, Button, Textarea, IconButton, BoxProps } from '@chakra-ui/react';
import { useForm } from 'react-hook-form';
import { insertData2Kb, putKbDataById, delOneKbDataByDataId } from '@/api/plugins/kb';
import { getFileViewUrl } from '@/api/support/file';
import { useToast } from '@/hooks/useToast';
import { getErrText } from '@/utils/tools';
import MyIcon from '@/components/Icon';
@@ -13,6 +12,7 @@ import { useQuery } from '@tanstack/react-query';
import { DatasetItemType } from '@/types/plugin';
import { useTranslation } from 'react-i18next';
import { useDatasetStore } from '@/store/dataset';
import { getFileAndOpen } from '@/utils/common/file';
export type FormData = { dataId?: string } & DatasetItemType;
@@ -267,9 +267,7 @@ export function RawFileText({ fileId, filename = '', ...props }: RawFileTextProp
textDecoration: 'underline',
onClick: async () => {
try {
const url = await getFileViewUrl(fileId);
const asPath = `${location.origin}${url}`;
window.open(asPath, '_blank');
await getFileAndOpen(fileId);
} catch (error) {
toast({
title: getErrText(error, '获取文件地址失败'),

View File

@@ -5,19 +5,18 @@ import { ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/chat';
import { getAIChatApi, axiosConfig } from '@/service/lib/openai';
import type { ClassifyQuestionAgentItemType } from '@/types/app';
import { countModelPrice } from '@/service/events/pushBill';
import { UserModelSchema } from '@/types/mongoSchema';
import { getModel } from '@/service/utils/data';
import { SystemInputEnum } from '@/constants/app';
import { SpecialInputKeyEnum } from '@/constants/flow';
import { FlowModuleTypeEnum } from '@/constants/flow';
import { ModuleDispatchProps } from '@/types/core/modules';
export type CQProps = {
export type CQProps = ModuleDispatchProps<{
systemPrompt?: string;
history?: ChatItemType[];
[SystemInputEnum.userChatInput]: string;
userOpenaiAccount: UserModelSchema['openaiAccount'];
[SpecialInputKeyEnum.agents]: ClassifyQuestionAgentItemType[];
};
}>;
export type CQResponse = {
[TaskResponseKeyEnum.responseData]: ChatHistoryItemResType;
[key: string]: any;
@@ -29,7 +28,11 @@ const maxTokens = 3000;
/* request openai chat */
export const dispatchClassifyQuestion = async (props: Record<string, any>): Promise<CQResponse> => {
const { agents, systemPrompt, history = [], userChatInput, userOpenaiAccount } = props as CQProps;
const {
moduleName,
userOpenaiAccount,
inputs: { agents, systemPrompt, history = [], userChatInput }
} = props as CQProps;
if (!userChatInput) {
return Promise.reject('Input is empty');
@@ -97,6 +100,7 @@ export const dispatchClassifyQuestion = async (props: Record<string, any>): Prom
[result.key]: 1,
[TaskResponseKeyEnum.responseData]: {
moduleType: FlowModuleTypeEnum.classifyQuestion,
moduleName,
price: userOpenaiAccount?.key ? 0 : countModelPrice({ model: agentModel, tokens }),
model: getModel(agentModel)?.name || agentModel,
tokens,

View File

@@ -6,17 +6,16 @@ import { getAIChatApi, axiosConfig } from '@/service/lib/openai';
import type { ContextExtractAgentItemType } from '@/types/app';
import { ContextExtractEnum } from '@/constants/flow/flowField';
import { countModelPrice } from '@/service/events/pushBill';
import { UserModelSchema } from '@/types/mongoSchema';
import { getModel } from '@/service/utils/data';
import { FlowModuleTypeEnum } from '@/constants/flow';
import { ModuleDispatchProps } from '@/types/core/modules';
export type Props = {
userOpenaiAccount: UserModelSchema['openaiAccount'];
export type Props = ModuleDispatchProps<{
history?: ChatItemType[];
[ContextExtractEnum.content]: string;
[ContextExtractEnum.extractKeys]: ContextExtractAgentItemType[];
[ContextExtractEnum.description]: string;
};
}>;
export type Response = {
[ContextExtractEnum.success]?: boolean;
[ContextExtractEnum.failed]?: boolean;
@@ -29,11 +28,9 @@ const agentFunName = 'agent_extract_data';
const maxTokens = 4000;
export async function dispatchContentExtract({
moduleName,
userOpenaiAccount,
content,
extractKeys,
history = [],
description
inputs: { content, extractKeys, history = [], description }
}: Props): Promise<Response> {
if (!content) {
return Promise.reject('Input is empty');
@@ -120,6 +117,7 @@ export async function dispatchContentExtract({
...arg,
[TaskResponseKeyEnum.responseData]: {
moduleType: FlowModuleTypeEnum.contentExtract,
moduleName,
price: userOpenaiAccount?.key ? 0 : countModelPrice({ model: agentModel, tokens }),
model: getModel(agentModel)?.name || agentModel,
tokens,

View File

@@ -11,7 +11,6 @@ import { TaskResponseKeyEnum } from '@/constants/chat';
import { getChatModel } from '@/service/utils/data';
import { countModelPrice } from '@/service/events/pushBill';
import { ChatModelItemType } from '@/types/model';
import { UserModelSchema } from '@/types/mongoSchema';
import { textCensor } from '@/api/service/plugins';
import { ChatCompletionRequestMessageRoleEnum } from 'openai';
import { AppModuleItemType } from '@/types/app';
@@ -21,19 +20,16 @@ import { defaultQuotePrompt, defaultQuoteTemplate } from '@/prompts/core/AIChat'
import type { AIChatProps } from '@/types/core/aiChat';
import { replaceVariable } from '@/utils/common/tools/text';
import { FlowModuleTypeEnum } from '@/constants/flow';
import { ModuleDispatchProps } from '@/types/core/modules';
export type ChatProps = AIChatProps & {
res: NextApiResponse;
history?: ChatItemType[];
export type ChatProps = ModuleDispatchProps<
AIChatProps & {
userChatInput: string;
stream?: boolean;
detail?: boolean;
history?: ChatItemType[];
quoteQA?: QuoteItemType[];
systemPrompt?: string;
limitPrompt?: string;
userOpenaiAccount: UserModelSchema['openaiAccount'];
outputs: AppModuleItemType['outputs'];
};
}
>;
export type ChatResponse = {
[TaskResponseKeyEnum.answerText]: string;
[TaskResponseKeyEnum.responseData]: ChatHistoryItemResType;
@@ -41,24 +37,27 @@ export type ChatResponse = {
};
/* request openai chat */
export const dispatchChatCompletion = async (props: Record<string, any>): Promise<ChatResponse> => {
export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResponse> => {
let {
res,
moduleName,
stream = false,
detail = false,
userOpenaiAccount,
outputs,
inputs: {
model = global.chatModels[0]?.model,
temperature = 0,
maxToken = 4000,
stream = false,
detail = false,
history = [],
quoteQA = [],
userChatInput,
systemPrompt = '',
limitPrompt,
quoteTemplate,
quotePrompt,
userOpenaiAccount,
outputs
} = props as ChatProps;
quotePrompt
}
} = props;
if (!userChatInput) {
return Promise.reject('Question is empty');
}
@@ -177,6 +176,7 @@ export const dispatchChatCompletion = async (props: Record<string, any>): Promis
[TaskResponseKeyEnum.answerText]: answerText,
[TaskResponseKeyEnum.responseData]: {
moduleType: FlowModuleTypeEnum.chatNode,
moduleName,
price: userOpenaiAccount?.key ? 0 : countModelPrice({ model, tokens: totalTokens }),
model: modelConstantsData.name,
tokens: totalTokens,
@@ -194,15 +194,18 @@ function filterQuote({
model,
quoteTemplate
}: {
quoteQA: ChatProps['quoteQA'];
quoteQA: ChatProps['inputs']['quoteQA'];
model: ChatModelItemType;
quoteTemplate?: string;
}) {
const sliceResult = sliceMessagesTB({
maxTokens: model.quoteMaxToken,
messages: quoteQA.map((item) => ({
messages: quoteQA.map((item, index) => ({
obj: ChatRoleEnum.System,
value: replaceVariable(quoteTemplate || defaultQuoteTemplate, item)
value: replaceVariable(quoteTemplate || defaultQuoteTemplate, {
...item,
index: `${index + 1}`
})
}))
});
@@ -212,7 +215,12 @@ function filterQuote({
const quoteText =
filterQuoteQA.length > 0
? `${filterQuoteQA
.map((item) => replaceVariable(quoteTemplate || defaultQuoteTemplate, item))
.map((item, index) =>
replaceVariable(quoteTemplate || defaultQuoteTemplate, {
...item,
index: `${index + 1}`
})
)
.join('\n')}`
: '';
@@ -232,7 +240,7 @@ function getChatMessages({
}: {
quotePrompt?: string;
quoteText: string;
history: ChatProps['history'];
history: ChatProps['inputs']['history'];
systemPrompt: string;
limitPrompt?: string;
userChatInput: string;
@@ -288,7 +296,7 @@ function getMaxTokens({
}: {
maxToken: number;
model: ChatModelItemType;
filterMessages: ChatProps['history'];
filterMessages: ChatProps['inputs']['history'];
}) {
const tokensLimit = model.contextMaxToken;
/* count response max token */

View File

@@ -1,13 +1,16 @@
import { SystemInputEnum } from '@/constants/app';
import { ChatItemType } from '@/types/chat';
import type { ModuleDispatchProps } from '@/types/core/modules';
export type HistoryProps = {
export type HistoryProps = ModuleDispatchProps<{
maxContext: number;
[SystemInputEnum.history]: ChatItemType[];
};
}>;
export const dispatchHistory = (props: Record<string, any>) => {
const { maxContext = 5, history = [] } = props as HistoryProps;
const {
inputs: { maxContext = 5, history = [] }
} = props as HistoryProps;
return {
history: maxContext > 0 ? history.slice(-maxContext) : []

View File

@@ -1,11 +1,14 @@
import { SystemInputEnum } from '@/constants/app';
import type { ModuleDispatchProps } from '@/types/core/modules';
export type UserChatInputProps = {
export type UserChatInputProps = ModuleDispatchProps<{
[SystemInputEnum.userChatInput]: string;
};
}>;
export const dispatchChatInput = (props: Record<string, any>) => {
const { userChatInput } = props as UserChatInputProps;
const {
inputs: { userChatInput }
} = props as UserChatInputProps;
return {
userChatInput
};

View File

@@ -7,13 +7,14 @@ import type { SelectedKbType } from '@/types/plugin';
import type { QuoteItemType } from '@/types/chat';
import { PgDatasetTableName } from '@/constants/plugin';
import { FlowModuleTypeEnum } from '@/constants/flow';
import { ModuleDispatchProps } from '@/types/core/modules';
type KBSearchProps = {
type KBSearchProps = ModuleDispatchProps<{
kbList: SelectedKbType;
similarity: number;
limit: number;
userChatInput: string;
};
}>;
export type KBSearchResponse = {
[TaskResponseKeyEnum.responseData]: ChatHistoryItemResType;
isEmpty?: boolean;
@@ -22,7 +23,10 @@ export type KBSearchResponse = {
};
export async function dispatchKBSearch(props: Record<string, any>): Promise<KBSearchResponse> {
const { kbList = [], similarity = 0.4, limit = 5, userChatInput } = props as KBSearchProps;
const {
moduleName,
inputs: { kbList = [], similarity = 0.4, limit = 5, userChatInput }
} = props as KBSearchProps;
if (kbList.length === 0) {
return Promise.reject("You didn't choose the knowledge base");
@@ -59,6 +63,7 @@ export async function dispatchKBSearch(props: Record<string, any>): Promise<KBSe
quoteQA: searchRes,
responseData: {
moduleType: FlowModuleTypeEnum.kbSearchNode,
moduleName,
price: countModelPrice({ model: vectorModel.model, tokens: tokenLen }),
model: vectorModel.name,
tokens: tokenLen,

View File

@@ -1,21 +1,23 @@
import { sseResponseEventEnum, TaskResponseKeyEnum } from '@/constants/chat';
import { sseResponse } from '@/service/utils/tools';
import { textAdaptGptResponse } from '@/utils/adapt';
import type { NextApiResponse } from 'next';
import type { ModuleDispatchProps } from '@/types/core/modules';
export type AnswerProps = {
res: NextApiResponse;
detail?: boolean;
export type AnswerProps = ModuleDispatchProps<{
text: string;
stream: boolean;
};
}>;
export type AnswerResponse = {
[TaskResponseKeyEnum.answerText]: string;
finish: boolean;
};
export const dispatchAnswer = (props: Record<string, any>): AnswerResponse => {
const { res, detail, text = '', stream } = props as AnswerProps;
const {
res,
detail,
stream,
inputs: { text = '' }
} = props as AnswerProps;
if (stream) {
sseResponse({

View File

@@ -1,16 +1,13 @@
import { TaskResponseKeyEnum } from '@/constants/chat';
import { HttpPropsEnum } from '@/constants/flow/flowField';
import { ChatHistoryItemResType } from '@/types/chat';
import type { NextApiResponse } from 'next';
import { FlowModuleTypeEnum } from '@/constants/flow';
import { ModuleDispatchProps } from '@/types/core/modules';
export type HttpRequestProps = {
res: NextApiResponse;
stream: boolean;
userOpenaiAccount: any;
export type HttpRequestProps = ModuleDispatchProps<{
[HttpPropsEnum.url]: string;
[key: string]: any;
};
}>;
export type HttpResponse = {
[HttpPropsEnum.finish]: boolean;
[HttpPropsEnum.failed]?: boolean;
@@ -19,16 +16,30 @@ export type HttpResponse = {
};
export const dispatchHttpRequest = async (props: Record<string, any>): Promise<HttpResponse> => {
const { res, stream, userOpenaiAccount, url, ...body } = props as HttpRequestProps;
const {
moduleName,
variables,
inputs: { url, ...body }
} = props as HttpRequestProps;
const requestBody = {
variables,
...body
};
try {
const response = await fetchData({ url, body });
const response = await fetchData({
url,
body: requestBody
});
return {
[HttpPropsEnum.finish]: true,
[TaskResponseKeyEnum.responseData]: {
moduleType: FlowModuleTypeEnum.httpRequest,
moduleName,
price: 0,
body: requestBody,
httpResult: response
},
...response
@@ -39,8 +50,10 @@ export const dispatchHttpRequest = async (props: Record<string, any>): Promise<H
[HttpPropsEnum.failed]: true,
[TaskResponseKeyEnum.responseData]: {
moduleType: FlowModuleTypeEnum.httpRequest,
moduleName,
price: 0,
httpResult: {}
body: requestBody,
httpResult: { error }
}
};
}

View File

@@ -50,10 +50,11 @@ export type QuoteItemType = KbDataItemType & {
// response data
export type ChatHistoryItemResType = {
moduleType: `${FlowModuleTypeEnum}`;
moduleName: string;
price: number;
runningTime?: number;
model?: string;
tokens?: number;
model?: string;
// chat
question?: string;
@@ -62,7 +63,7 @@ export type ChatHistoryItemResType = {
quoteList?: QuoteItemType[];
historyPreview?: ChatItemType[]; // completion context array. history will slice
// kb search
// dataset search
similarity?: number;
limit?: number;
@@ -75,5 +76,6 @@ export type ChatHistoryItemResType = {
extractResult?: Record<string, any>;
// http
body?: Record<string, any>;
httpResult?: Record<string, any>;
};

View File

@@ -1,6 +1,7 @@
/* ai chat modules props */
export type AIChatProps = {
model: string;
systemPrompt: string;
systemPrompt?: string;
temperature: number;
maxToken: number;
quoteTemplate?: string;

15
client/src/types/core/modules.d.ts vendored Normal file
View File

@@ -0,0 +1,15 @@
import type { NextApiResponse } from 'next';
import { RunningModuleItemType } from '../app';
import { UserModelSchema } from '../mongoSchema';
// Common props handed to every flow-module dispatch handler.
// T is the shape of the module's own input parameters, collected under `inputs`.
export type ModuleDispatchProps<T> = {
  // Next.js response object (used e.g. by the answer module to write SSE output).
  res: NextApiResponse;
  // Display name of the running module; recorded into responseData for the UI.
  moduleName: string;
  // Whether the client requested a streamed response.
  stream: boolean;
  // Whether detailed response data should be emitted — TODO confirm exact semantics.
  detail: boolean;
  // Chat-level variables (merged into e.g. the HTTP module's request body).
  variables: Record<string, any>;
  // Output definitions of the running module.
  outputs: RunningModuleItemType['outputs'];
  // The user's own OpenAI account config, when supplied (disables billing).
  userOpenaiAccount?: UserModelSchema['openaiAccount'];
  // Module-specific input values, keyed by input field.
  inputs: T;
};

View File

@@ -31,12 +31,12 @@ export const getDefaultAppForm = (): EditFormType => {
return {
chatModel: {
model: defaultChatModel.model,
model: defaultChatModel?.model,
systemPrompt: '',
temperature: 0,
quotePrompt: '',
quoteTemplate: '',
maxToken: defaultChatModel.contextMaxToken / 2,
maxToken: defaultChatModel ? defaultChatModel.contextMaxToken / 2 : 4000,
frequency: 0.5,
presence: -0.5
},

View File

@@ -0,0 +1,7 @@
import { getFileViewUrl } from '@/api/support/file';
/**
 * Resolve the server-side view URL for a stored file and open it
 * in a new browser tab.
 */
export async function getFileAndOpen(fileId: string) {
  const viewUrl = await getFileViewUrl(fileId);
  window.open(`${location.origin}${viewUrl}`, '_blank');
}