perf: completion dispatch

This commit is contained in:
archer
2023-07-23 14:07:59 +08:00
parent 8151350d9f
commit 6027a966d2
33 changed files with 1797 additions and 2181 deletions

View File

@@ -4,7 +4,7 @@ import type { InitChatResponse, InitShareChatResponse } from './response/chat';
import { RequestPaging } from '../types/index';
import type { ShareChatSchema } from '@/types/mongoSchema';
import type { ShareChatEditType } from '@/types/app';
import type { QuoteItemType } from '@/pages/api/app/modules/kb/search';
import type { QuoteItemType } from '@/types/chat';
import type { Props as UpdateHistoryProps } from '@/pages/api/chat/history/updateChatHistory';
/**

View File

@@ -1,8 +1,7 @@
import { sseResponseEventEnum } from '@/constants/chat';
import { getErrText } from '@/utils/tools';
import { parseStreamChunk } from '@/utils/adapt';
import { QuoteItemType } from '@/pages/api/app/modules/kb/search';
import { rawSearchKey } from '@/constants/chat';
import { QuoteItemType } from '@/types/chat';
interface StreamFetchProps {
url?: string;
@@ -20,7 +19,6 @@ export const streamFetch = ({
responseText: string;
errMsg: string;
newChatId: string | null;
[rawSearchKey]: QuoteItemType[];
}>(async (resolve, reject) => {
try {
const response = await window.fetch(url, {
@@ -43,7 +41,6 @@ export const streamFetch = ({
// response data
let responseText = '';
let rawSearch: QuoteItemType[] = [];
let errMsg = '';
const newChatId = response.headers.get('newChatId');
@@ -55,8 +52,7 @@ export const streamFetch = ({
return resolve({
responseText,
errMsg,
newChatId,
rawSearch
newChatId
});
} else {
return reject({
@@ -82,7 +78,6 @@ export const streamFetch = ({
onMessage(answer);
responseText += answer;
} else if (item.event === sseResponseEventEnum.appStreamResponse) {
rawSearch = data?.[rawSearchKey] ? data[rawSearchKey] : rawSearch;
} else if (item.event === sseResponseEventEnum.error) {
errMsg = getErrText(data, '流响应错误');
}
@@ -93,8 +88,7 @@ export const streamFetch = ({
return resolve({
responseText,
errMsg,
newChatId,
rawSearch
newChatId
});
}
reject(getErrText(err, '请求异常'));

View File

@@ -17,7 +17,7 @@ import { useQuery } from '@tanstack/react-query';
import { getHistoryQuote, updateHistoryQuote } from '@/api/chat';
import { useToast } from '@/hooks/useToast';
import { getErrText } from '@/utils/tools';
import { QuoteItemType } from '@/pages/api/app/modules/kb/search';
import { QuoteItemType } from '@/types/chat';
const QuoteModal = ({
chatId,

View File

@@ -22,7 +22,6 @@ import { Box, Card, Flex, Input, Textarea, Button, useTheme } from '@chakra-ui/r
import { useUserStore } from '@/store/user';
import { feConfigs } from '@/store/static';
import { Types } from 'mongoose';
import { HUMAN_ICON, quoteLenKey, rawSearchKey } from '@/constants/chat';
import { EventNameEnum } from '../Markdown/constant';
import { adaptChatItem_openAI } from '@/utils/plugin/openai';
@@ -35,7 +34,7 @@ import { fileDownload } from '@/utils/file';
import { htmlTemplate } from '@/constants/common';
import { useRouter } from 'next/router';
import { useGlobalStore } from '@/store/global';
import { QuoteItemType } from '@/pages/api/app/modules/kb/search';
import { QuoteItemType } from '@/types/chat';
import { FlowModuleTypeEnum } from '@/constants/flow';
import dynamic from 'next/dynamic';
@@ -598,7 +597,7 @@ const ChatBox = (
source={item.value}
isChatting={index === chatHistory.length - 1 && isChatting}
/>
{(!!item[quoteLenKey] || !!item[rawSearchKey]?.length) && (
{/* {(!!item[quoteLenKey] || !!item[rawSearchKey]?.length) && (
<Button
size={'xs'}
variant={'base'}
@@ -613,7 +612,7 @@ const ChatBox = (
>
{item[quoteLenKey] || item[rawSearchKey]?.length}条引用
</Button>
)}
)} */}
</Card>
<Flex {...controlContainerStyle}>

View File

@@ -1,12 +1,4 @@
/* app */
export enum AppModuleItemTypeEnum {
'variable' = 'variable',
'userGuide' = 'userGuide',
'initInput' = 'initInput',
'http' = 'http', // send a http request
'switch' = 'switch', // one input and two outputs
'answer' = 'answer' // redirect response
}
export enum SystemInputEnum {
'welcomeText' = 'welcomeText',
'variables' = 'variables',
@@ -14,10 +6,7 @@ export enum SystemInputEnum {
'history' = 'history',
'userChatInput' = 'userChatInput'
}
export enum TaskResponseKeyEnum {
'answerText' = 'answerText', // answer module text key
'responseData' = 'responseData'
}
export enum VariableInputEnum {
input = 'input',
select = 'select'

View File

@@ -12,6 +12,11 @@ export enum ChatRoleEnum {
AI = 'AI'
}
export enum TaskResponseKeyEnum {
'answerText' = 'answerText', // answer module text key
'responseData' = 'responseData'
}
export const ChatRoleMap = {
[ChatRoleEnum.System]: {
name: '系统提示词'
@@ -46,10 +51,5 @@ export const ChatSourceMap = {
}
};
export const responseDataKey = 'responseData';
export const rawSearchKey = 'rawSearch';
export const quoteLenKey = 'quoteLen';
export const HUMAN_ICON = `https://fastgpt.run/icon/human.png`;
export const LOGO_ICON = `https://fastgpt.run/icon/logo.png`;

File diff suppressed because it is too large Load Diff

View File

@@ -19,9 +19,10 @@ export enum FlowOutputItemTypeEnum {
}
export enum FlowModuleTypeEnum {
empty = 'empty',
variable = 'variable',
userGuide = 'userGuide',
questionInputNode = 'questionInput',
questionInput = 'questionInput',
historyNode = 'historyNode',
chatNode = 'chatNode',
kbSearchNode = 'kbSearchNode',
@@ -30,6 +31,11 @@ export enum FlowModuleTypeEnum {
classifyQuestion = 'classifyQuestion'
}
export const initModuleType: Record<string, boolean> = {
[FlowModuleTypeEnum.historyNode]: true,
[FlowModuleTypeEnum.questionInput]: true
};
export const edgeOptions = {
style: {
strokeWidth: 1,

View File

@@ -1,20 +0,0 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { SystemInputEnum } from '@/constants/app';
import { ChatItemType } from '@/types/chat';
export type Props = {
maxContext: number;
[SystemInputEnum.history]: ChatItemType[];
};
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
const { maxContext = 5, history } = req.body as Props;
jsonRes(res, {
data: {
history: history.slice(-maxContext)
}
});
}

View File

@@ -1,17 +0,0 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { SystemInputEnum } from '@/constants/app';
export type Props = {
[SystemInputEnum.userChatInput]: string;
};
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
const { userChatInput } = req.body as Props;
jsonRes(res, {
data: {
userChatInput
}
});
}

View File

@@ -1,137 +0,0 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { PgClient } from '@/service/pg';
import { withNextCors } from '@/service/utils/tools';
import type { ChatItemType } from '@/types/chat';
import { ChatRoleEnum, rawSearchKey, responseDataKey } from '@/constants/chat';
import { modelToolMap } from '@/utils/plugin';
import { getVector } from '@/pages/api/openapi/plugin/vector';
import { countModelPrice, pushTaskBillListItem } from '@/service/events/pushBill';
import { getModel } from '@/service/utils/data';
import { authUser } from '@/service/utils/auth';
import type { SelectedKbType } from '@/types/plugin';
export type QuoteItemType = {
kb_id: string;
id: string;
q: string;
a: string;
source?: string;
};
type Props = {
kbList: SelectedKbType;
history: ChatItemType[];
similarity: number;
limit: number;
maxToken: number;
userChatInput: string;
stream?: boolean;
billId?: string;
};
type Response = {
[responseDataKey]: {
[rawSearchKey]: QuoteItemType[];
};
isEmpty?: boolean;
quotePrompt?: string;
};
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await authUser({ req, authRoot: true });
const { kbList = [], userChatInput } = req.body as Props;
if (!userChatInput) {
throw new Error('用户输入为空');
}
if (!Array.isArray(kbList) || kbList.length === 0) {
throw new Error('没有选择知识库');
}
const result = await kbSearch({
...req.body,
kbList,
userChatInput
});
jsonRes<Response>(res, {
data: result
});
} catch (err) {
console.log(err);
jsonRes(res, {
code: 500,
error: err
});
}
});
export async function kbSearch({
kbList = [],
history = [],
similarity = 0.8,
limit = 5,
maxToken = 2500,
userChatInput,
billId
}: Props): Promise<Response> {
if (kbList.length === 0) {
return Promise.reject('没有选择知识库');
}
// get vector
const vectorModel = global.vectorModels[0].model;
const { vectors, tokenLen } = await getVector({
model: vectorModel,
input: [userChatInput]
});
// search kb
const [res]: any = await Promise.all([
PgClient.query(
`BEGIN;
SET LOCAL ivfflat.probes = ${global.systemEnv.pgIvfflatProbe || 10};
select kb_id,id,q,a,source from modelData where kb_id IN (${kbList
.map((item) => `'${item.kbId}'`)
.join(',')}) AND vector <#> '[${vectors[0]}]' < -${similarity} order by vector <#> '[${
vectors[0]
}]' limit ${limit};
COMMIT;`
),
pushTaskBillListItem({
billId,
moduleName: 'Vector Generate',
amount: countModelPrice({ model: vectorModel, tokens: tokenLen }),
model: getModel(vectorModel)?.name,
tokenLen
})
]);
const searchRes: QuoteItemType[] = res?.[2]?.rows || [];
// filter part quote by maxToken
const sliceResult = modelToolMap
.tokenSlice({
maxToken,
messages: searchRes.map((item, i) => ({
obj: ChatRoleEnum.System,
value: `${i + 1}: [${item.q}\n${item.a}]`
}))
})
.map((item) => item.value)
.join('\n')
.trim();
// slice filterSearch
const rawSearch = searchRes.slice(0, sliceResult.length);
return {
isEmpty: rawSearch.length === 0 ? true : undefined,
quotePrompt: sliceResult ? `知识库:\n${sliceResult}` : undefined,
responseData: {
rawSearch
}
};
}

View File

@@ -8,7 +8,7 @@ import { type ChatCompletionRequestMessage } from 'openai';
import { AppModuleItemType } from '@/types/app';
import { dispatchModules } from '../openapi/v1/chat/completions';
import { gptMessage2ChatType } from '@/utils/adapt';
import { createTaskBill, delTaskBill, finishTaskBill } from '@/service/events/pushBill';
import { pushTaskBill } from '@/service/events/pushBill';
import { BillSourceEnum } from '@/constants/user';
export type MessageItemType = ChatCompletionRequestMessage & { _id?: string };
@@ -31,7 +31,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
});
let { modules = [], history = [], prompt, variables = {}, appName, appId } = req.body as Props;
let billId = '';
try {
if (!history || !modules || !prompt) {
throw new Error('Prams Error');
@@ -45,13 +44,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
/* user auth */
const { userId } = await authUser({ req });
billId = await createTaskBill({
userId,
appName,
appId,
source: BillSourceEnum.fastgpt
});
/* start process */
const { responseData } = await dispatchModules({
res,
@@ -61,8 +53,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
history: gptMessage2ChatType(history),
userChatInput: prompt
},
stream: true,
billId
stream: true
});
sseResponse({
@@ -77,12 +68,14 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
});
res.end();
// bill
finishTaskBill({
billId
pushTaskBill({
appName,
appId,
userId,
source: BillSourceEnum.fastgpt,
response: responseData
});
} catch (err: any) {
delTaskBill(billId);
res.status(500);
sseErrRes(res, err);
res.end();

View File

@@ -2,21 +2,29 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase } from '@/service/mongo';
import { authUser, authApp, authShareChat } from '@/service/utils/auth';
import { sseErrRes, jsonRes } from '@/service/response';
import { ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat';
import { withNextCors } from '@/service/utils/tools';
import { ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat';
import {
dispatchHistory,
dispatchChatInput,
dispatchChatCompletion,
dispatchKBSearch,
dispatchAnswer,
dispatchClassifyQuestion
} from '@/service/moduleDispatch';
import type { CreateChatCompletionRequest } from 'openai';
import { gptMessage2ChatType, textAdaptGptResponse } from '@/utils/adapt';
import { gptMessage2ChatType } from '@/utils/adapt';
import { getChatHistory } from './getHistory';
import { saveChat } from '@/pages/api/chat/saveChat';
import { sseResponse } from '@/service/utils/tools';
import { type ChatCompletionRequestMessage } from 'openai';
import { TaskResponseKeyEnum, AppModuleItemTypeEnum } from '@/constants/app';
import { TaskResponseKeyEnum } from '@/constants/chat';
import { FlowModuleTypeEnum, initModuleType } from '@/constants/flow';
import { Types } from 'mongoose';
import { moduleFetch } from '@/service/api/request';
import { AppModuleItemType, RunningModuleItemType } from '@/types/app';
import { FlowInputItemTypeEnum } from '@/constants/flow';
import { finishTaskBill, createTaskBill, delTaskBill } from '@/service/events/pushBill';
import { pushTaskBill } from '@/service/events/pushBill';
import { BillSourceEnum } from '@/constants/user';
import { ChatHistoryItemResType } from '@/types/chat';
export type MessageItemType = ChatCompletionRequestMessage & { _id?: string };
type FastGptWebChatProps = {
@@ -49,8 +57,6 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
let { chatId, appId, shareId, stream = false, messages = [], variables = {} } = req.body as Props;
let billId = '';
try {
if (!messages) {
throw new Error('Prams Error');
@@ -105,13 +111,6 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
res.setHeader('newChatId', String(newChatId));
}
billId = await createTaskBill({
userId,
appName: app.name,
appId,
source: authType === 'apikey' ? BillSourceEnum.api : BillSourceEnum.fastgpt
});
/* start process */
const { responseData, answerText } = await dispatchModules({
res,
@@ -121,9 +120,9 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
history: prompts,
userChatInput: prompt.value
},
stream,
billId
stream
});
console.log(responseData, '===', answerText);
if (!answerText) {
throw new Error('回复内容为空,可能模块编排出现问题');
@@ -169,10 +168,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
res.end();
} else {
res.json({
data: {
newChatId,
...responseData
},
responseData,
id: chatId || '',
model: '',
usage: { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 },
@@ -186,14 +182,14 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
});
}
// bill
finishTaskBill({
billId,
shareId
pushTaskBill({
appName: app.name,
appId,
userId,
source: authType === 'apikey' ? BillSourceEnum.api : BillSourceEnum.fastgpt,
response: responseData
});
} catch (err: any) {
delTaskBill(billId);
if (stream) {
sseErrRes(res, err);
res.end();
@@ -211,35 +207,29 @@ export async function dispatchModules({
modules,
params = {},
variables = {},
stream = false,
billId
stream = false
}: {
res: NextApiResponse;
modules: AppModuleItemType[];
params?: Record<string, any>;
variables?: Record<string, any>;
billId: string;
stream?: boolean;
}) {
const runningModules = loadModules(modules, variables);
// let storeData: Record<string, any> = {}; // after module used
let chatResponse: Record<string, any> = {}; // response request and save to database
let answerText = ''; // AI answer
let chatResponse: ChatHistoryItemResType[] = []; // response request and save to database
let chatAnswerText = ''; // AI answer
function pushStore({
answer,
responseData = {}
answerText = '',
responseData
}: {
answer?: string;
responseData?: Record<string, any>;
answerText?: string;
responseData?: ChatHistoryItemResType;
}) {
chatResponse = {
...chatResponse,
...responseData
};
answerText += answer;
responseData && chatResponse.push(responseData);
chatAnswerText += answerText;
}
function moduleInput(
module: RunningModuleItemType,
@@ -292,63 +282,45 @@ export async function dispatchModules({
}
async function moduleRun(module: RunningModuleItemType): Promise<any> {
if (res.closed) return Promise.resolve();
console.log('run=========', module.type, module.url);
console.log('run=========', module.flowType);
// direct answer
if (module.type === AppModuleItemTypeEnum.answer) {
const text =
module.inputs.find((item) => item.key === TaskResponseKeyEnum.answerText)?.value || '';
pushStore({
answer: text
});
return StreamAnswer({
res,
stream,
text: text
});
}
// get fetch params
const params: Record<string, any> = {};
module.inputs.forEach((item: any) => {
params[item.key] = item.value;
});
const props: Record<string, any> = {
res,
stream,
...params
};
if (module.type === AppModuleItemTypeEnum.switch) {
return moduleOutput(module, switchResponse(module));
}
if (
(module.type === AppModuleItemTypeEnum.http ||
module.type === AppModuleItemTypeEnum.initInput) &&
module.url
) {
// get fetch params
const params: Record<string, any> = {};
module.inputs.forEach((item: any) => {
params[item.key] = item.value;
});
const data = {
stream,
billId,
...params
const dispatchRes = await (async () => {
const callbackMap: Record<string, Function> = {
[FlowModuleTypeEnum.historyNode]: dispatchHistory,
[FlowModuleTypeEnum.questionInput]: dispatchChatInput,
[FlowModuleTypeEnum.answerNode]: dispatchAnswer,
[FlowModuleTypeEnum.chatNode]: dispatchChatCompletion,
[FlowModuleTypeEnum.kbSearchNode]: dispatchKBSearch,
[FlowModuleTypeEnum.classifyQuestion]: dispatchClassifyQuestion
};
if (callbackMap[module.flowType]) {
return callbackMap[module.flowType](props);
}
return {};
})();
// response data
const fetchRes = await moduleFetch({
res,
url: module.url,
data
});
return moduleOutput(module, fetchRes);
}
return moduleOutput(module, dispatchRes);
}
// start process width initInput
const initModules = runningModules.filter(
(item) => item.type === AppModuleItemTypeEnum.initInput
);
const initModules = runningModules.filter((item) => initModuleType[item.flowType]);
await Promise.all(initModules.map((module) => moduleInput(module, params)));
return {
responseData: chatResponse,
answerText
[TaskResponseKeyEnum.answerText]: chatAnswerText,
[TaskResponseKeyEnum.responseData]: chatResponse
};
}
@@ -359,10 +331,9 @@ function loadModules(
return modules.map((module) => {
return {
moduleId: module.moduleId,
type: module.type,
url: module.url,
flowType: module.flowType,
inputs: module.inputs
.filter((item) => item.type !== FlowInputItemTypeEnum.target || item.connected) // filter unconnected target input
.filter((item) => item.connected) // filter unconnected target input
.map((item) => {
if (typeof item.value !== 'string') {
return {
@@ -385,38 +356,9 @@ function loadModules(
outputs: module.outputs.map((item) => ({
key: item.key,
answer: item.key === TaskResponseKeyEnum.answerText,
response: item.response,
value: undefined,
targets: item.targets
}))
};
});
}
function StreamAnswer({
res,
stream = false,
text = ''
}: {
res: NextApiResponse;
stream?: boolean;
text?: string;
}) {
if (stream && text) {
return sseResponse({
res,
event: sseResponseEventEnum.answer,
data: textAdaptGptResponse({
text: text.replace(/\\n/g, '\n')
})
});
}
return text;
}
function switchResponse(module: RunningModuleItemType) {
const val = module?.inputs?.[0]?.value;
if (val) {
return { true: 1 };
}
return { false: 1 };
}

View File

@@ -43,7 +43,7 @@ const NodeChat = ({
return (
<MySelect
width={'100%'}
value={inputItem.value || chatModelList[0]?.model}
value={inputItem.value}
list={list}
onchange={(e) => {
onChangeNode({

View File

@@ -0,0 +1,9 @@
import React from 'react';
import { NodeProps } from 'reactflow';
import NodeCard from '../modules/NodeCard';
import { FlowModuleItemType } from '@/types/flow';
const NodeAnswer = ({ data: { ...props } }: NodeProps<FlowModuleItemType>) => {
return <NodeCard {...props}></NodeCard>;
};
export default React.memo(NodeAnswer);

View File

@@ -1,7 +1,7 @@
import React, { useRef } from 'react';
import { Box, Flex, useOutsideClick } from '@chakra-ui/react';
import { ModuleTemplates } from '@/constants/flow/ModuleTemplate';
import type { AppModuleTemplateItemType } from '@/types/app';
import type { FlowModuleItemType } from '@/types/app';
import type { XYPosition } from 'reactflow';
import { useGlobalStore } from '@/store/global';
import Avatar from '@/components/Avatar';
@@ -12,7 +12,7 @@ const ModuleStoreList = ({
onClose
}: {
isOpen: boolean;
onAddNode: (e: { template: AppModuleTemplateItemType; position: XYPosition }) => void;
onAddNode: (e: { template: FlowModuleItemType; position: XYPosition }) => void;
onClose: () => void;
}) => {
const { isPc } = useGlobalStore();

View File

@@ -7,7 +7,7 @@ import MyTooltip from '@/components/MyTooltip';
import { QuestionOutlineIcon } from '@chakra-ui/icons';
type Props = {
children: React.ReactNode | React.ReactNode[] | string;
children?: React.ReactNode | React.ReactNode[] | string;
logo: string;
name: string;
description?: string;

View File

@@ -12,13 +12,20 @@ import ReactFlow, {
} from 'reactflow';
import { Box, Flex, IconButton, useTheme, useDisclosure } from '@chakra-ui/react';
import { SmallCloseIcon } from '@chakra-ui/icons';
import { edgeOptions, connectionLineStyle, FlowModuleTypeEnum } from '@/constants/flow';
import {
edgeOptions,
connectionLineStyle,
FlowModuleTypeEnum,
FlowInputItemTypeEnum
} from '@/constants/flow';
import { appModule2FlowNode, appModule2FlowEdge } from '@/utils/adapt';
import {
FlowModuleItemType,
FlowModuleTemplateType,
FlowOutputTargetItemType,
type FlowModuleItemChangeProps
} from '@/types/flow';
import { AppModuleItemType } from '@/types/app';
import { customAlphabet } from 'nanoid';
import { putAppById } from '@/api/app';
import { useRequest } from '@/hooks/useRequest';
@@ -61,20 +68,20 @@ const NodeUserGuide = dynamic(() => import('./components/Nodes/NodeUserGuide'),
import 'reactflow/dist/style.css';
import styles from './index.module.scss';
import { AppModuleItemType, AppModuleTemplateItemType } from '@/types/app';
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 6);
const nodeTypes = {
[FlowModuleTypeEnum.userGuide]: NodeUserGuide,
[FlowModuleTypeEnum.variable]: NodeVariable,
[FlowModuleTypeEnum.questionInputNode]: NodeQuestionInput,
[FlowModuleTypeEnum.questionInput]: NodeQuestionInput,
[FlowModuleTypeEnum.historyNode]: NodeHistory,
[FlowModuleTypeEnum.chatNode]: NodeChat,
[FlowModuleTypeEnum.kbSearchNode]: NodeKbSearch,
[FlowModuleTypeEnum.tfSwitchNode]: NodeTFSwitch,
[FlowModuleTypeEnum.answerNode]: NodeAnswer,
[FlowModuleTypeEnum.classifyQuestion]: NodeCQNode
// [FlowModuleTypeEnum.empty]: EmptyModule
};
const edgeTypes = {
buttonedge: ButtonEdge
@@ -147,7 +154,7 @@ const AppEdit = ({ app, fullScreen, onFullScreen }: Props) => {
[setEdges, setNodes]
);
const onAddNode = useCallback(
({ template, position }: { template: AppModuleTemplateItemType; position: XYPosition }) => {
({ template, position }: { template: FlowModuleItemType; position: XYPosition }) => {
if (!reactFlowWrapper.current) return;
const reactFlowBounds = reactFlowWrapper.current.getBoundingClientRect();
const mouseX = (position.x - reactFlowBounds.left - x) / zoom - 100;
@@ -158,8 +165,8 @@ const AppEdit = ({ app, fullScreen, onFullScreen }: Props) => {
appModule2FlowNode({
item: {
...template,
position: { x: mouseX, y: mouseY },
moduleId: nanoid()
moduleId: nanoid(),
position: { x: mouseX, y: mouseY }
},
onChangeNode,
onDelNode
@@ -169,14 +176,18 @@ const AppEdit = ({ app, fullScreen, onFullScreen }: Props) => {
},
[onChangeNode, onDelNode, setNodes, x, y, zoom]
);
const flow2Modules = useCallback(() => {
const flow2AppModules = useCallback(() => {
const modules: AppModuleItemType[] = nodes.map((item) => ({
...item.data,
moduleId: item.data.moduleId,
position: item.position,
onChangeNode: undefined,
onDelNode: undefined,
outputs: item.data.outputs.map((output) => ({
...output,
flowType: item.data.flowType,
inputs: item.data.inputs.map((item) => ({
key: item.key,
value: item.value,
connected: item.type !== FlowInputItemTypeEnum.target
})),
outputs: item.data.outputs.map((item) => ({
key: item.key,
targets: [] as FlowOutputTargetItemType[]
}))
}));
@@ -184,9 +195,11 @@ const AppEdit = ({ app, fullScreen, onFullScreen }: Props) => {
// update inputs and outputs
modules.forEach((module) => {
module.inputs.forEach((input) => {
input.connected = !!edges.find(
(edge) => edge.target === module.moduleId && edge.targetHandle === input.key
);
input.connected =
input.connected ||
!!edges.find(
(edge) => edge.target === module.moduleId && edge.targetHandle === input.key
);
});
module.outputs.forEach((output) => {
output.targets = edges
@@ -233,7 +246,7 @@ const AppEdit = ({ app, fullScreen, onFullScreen }: Props) => {
const { mutate: onclickSave, isLoading } = useRequest({
mutationFn: () => {
return putAppById(app._id, {
modules: flow2Modules()
modules: flow2AppModules()
});
},
successToast: '保存配置成功',
@@ -270,6 +283,7 @@ const AppEdit = ({ app, fullScreen, onFullScreen }: Props) => {
useEffect(() => {
initData(JSON.parse(JSON.stringify(app)));
}, [app, initData]);
console.log(flow2AppModules());
return (
<>
@@ -340,7 +354,7 @@ const AppEdit = ({ app, fullScreen, onFullScreen }: Props) => {
aria-label={'save'}
variant={'base'}
onClick={() => {
setTestModules(flow2Modules());
setTestModules(flow2AppModules());
}}
/>
</MyTooltip>

View File

@@ -1,114 +0,0 @@
import axios, { Method, InternalAxiosRequestConfig, AxiosResponse } from 'axios';
interface ConfigType {
headers?: { [key: string]: string };
hold?: boolean;
timeout?: number;
}
interface ResponseDataType {
code: number;
message: string;
data: any;
}
/**
* 请求开始
*/
function requestStart(config: InternalAxiosRequestConfig): InternalAxiosRequestConfig {
if (config.headers) {
// config.headers.Authorization = getToken();
}
return config;
}
/**
* 请求成功,检查请求头
*/
function responseSuccess(response: AxiosResponse<ResponseDataType>) {
return response;
}
/**
* 响应数据检查
*/
function checkRes(data: ResponseDataType) {
if (data === undefined) {
console.log('error->', data, 'data is empty');
return Promise.reject('服务器异常');
} else if (data.code < 200 || data.code >= 400) {
return Promise.reject(data);
}
return data.data;
}
/**
* 响应错误
*/
function responseError(err: any) {
console.log('error->', '请求错误', err);
if (!err) {
return Promise.reject({ message: '未知错误' });
}
if (typeof err === 'string') {
return Promise.reject({ message: err });
}
return Promise.reject(err);
}
/* 创建请求实例 */
const instance = axios.create({
timeout: 60000, // 超时时间
headers: {
'content-type': 'application/json'
}
});
/* 请求拦截 */
instance.interceptors.request.use(requestStart, (err) => Promise.reject(err));
/* 响应拦截 */
instance.interceptors.response.use(responseSuccess, (err) => Promise.reject(err));
function request(url: string, data: any, config: ConfigType, method: Method): any {
/* 去空 */
for (const key in data) {
if (data[key] === null || data[key] === undefined) {
delete data[key];
}
}
return instance
.request({
baseURL: `http://localhost:${process.env.PORT || 3000}/api`,
url,
method,
data: ['POST', 'PUT'].includes(method) ? data : null,
params: !['POST', 'PUT'].includes(method) ? data : null,
...config // 用户自定义配置,可以覆盖前面的配置
})
.then((res) => checkRes(res.data))
.catch((err) => responseError(err));
}
/**
* api请求方式
* @param {String} url
* @param {Any} params
* @param {Object} config
* @returns
*/
export function GET<T>(url: string, params = {}, config: ConfigType = {}): Promise<T> {
return request(url, params, config, 'GET');
}
export function POST<T>(url: string, data = {}, config: ConfigType = {}): Promise<T> {
return request(url, data, config, 'POST');
}
export function PUT<T>(url: string, data = {}, config: ConfigType = {}): Promise<T> {
return request(url, data, config, 'PUT');
}
export function DELETE<T>(url: string, data = {}, config: ConfigType = {}): Promise<T> {
return request(url, data, config, 'DELETE');
}

View File

@@ -1,115 +0,0 @@
import { sseResponseEventEnum } from '@/constants/chat';
import { getErrText } from '@/utils/tools';
import { parseStreamChunk } from '@/utils/adapt';
import { NextApiResponse } from 'next';
import { sseResponse } from '../utils/tools';
import { TaskResponseKeyEnum } from '@/constants/app';
interface Props {
res: NextApiResponse; // 用于流转发
url: string;
data: Record<string, any>;
}
export const moduleFetch = ({ url, data, res }: Props) =>
new Promise<Record<string, any>>(async (resolve, reject) => {
try {
const abortSignal = new AbortController();
const baseUrl = `http://localhost:${process.env.PORT || 3000}/api`;
const requestUrl = url.startsWith('/') ? `${baseUrl}${url}` : url;
const response = await fetch(requestUrl, {
method: 'POST',
// @ts-ignore
headers: {
'Content-Type': 'application/json',
rootkey: process.env.ROOT_KEY
},
body: JSON.stringify(data),
signal: abortSignal.signal
});
if (response.status >= 300 || response.status < 200) {
const err = await response.json();
return reject(err);
}
if (!response?.body) {
throw new Error('Request Error');
}
const responseType = response.headers.get('content-type');
if (responseType && responseType.includes('application/json')) {
const jsonResponse = await response.json();
return resolve(jsonResponse?.data || {});
}
const reader = response.body?.getReader();
let chatResponse: Record<string, any> = {
[TaskResponseKeyEnum.answerText]: ''
};
const read = async () => {
try {
const { done, value } = await reader.read();
if (done) {
return resolve(chatResponse);
} else if (res.closed) {
resolve(chatResponse);
abortSignal.abort();
return;
}
const chunkResponse = parseStreamChunk(value);
chunkResponse.forEach((item) => {
// parse json data
const data = (() => {
try {
return JSON.parse(item.data);
} catch (error) {
return {};
}
})();
if (!res.closed && item.event === sseResponseEventEnum.moduleFetchResponse) {
chatResponse = {
...chatResponse,
...data
};
} else if (
!res.closed &&
item.event === sseResponseEventEnum.answer &&
data?.choices?.[0]?.delta
) {
// save answer
const answer: string = data?.choices?.[0].delta.content || '';
if (answer) {
chatResponse = {
...chatResponse,
[TaskResponseKeyEnum.answerText]:
chatResponse[TaskResponseKeyEnum.answerText] + answer
};
}
sseResponse({
res,
event: sseResponseEventEnum.answer,
data: JSON.stringify(data)
});
} else if (item.event === sseResponseEventEnum.error) {
return reject(data);
}
});
read();
} catch (err: any) {
if (err?.message === 'The operation was aborted.') {
return;
}
reject(getErrText(err, '请求异常'));
}
};
read();
} catch (err: any) {
console.log(err);
reject(getErrText(err, '请求异常'));
}
});

View File

@@ -1,93 +1,54 @@
import { connectToDatabase, Bill, User, ShareChat } from '../mongo';
import { BillSourceEnum } from '@/constants/user';
import { getModel } from '../utils/data';
import type { BillListItemType } from '@/types/mongoSchema';
import { ChatHistoryItemResType } from '@/types/chat';
import { formatPrice } from '@/utils/user';
export const createTaskBill = async ({
export const pushTaskBill = async ({
appName,
appId,
userId,
source
source,
shareId,
response
}: {
appName: string;
appId: string;
userId: string;
source: `${BillSourceEnum}`;
shareId?: string;
response: ChatHistoryItemResType[];
}) => {
const res = await Bill.create({
userId,
appName,
appId,
total: 0,
source,
list: []
});
return String(res._id);
};
const total = response.reduce((sum, item) => sum + item.price, 0);
export const pushTaskBillListItem = async ({
billId,
moduleName,
amount,
model,
tokenLen
}: { billId?: string } & BillListItemType) => {
if (!billId) return;
try {
await Bill.findByIdAndUpdate(billId, {
$push: {
list: {
moduleName,
amount,
model,
tokenLen
}
}
});
} catch (error) {}
};
export const finishTaskBill = async ({ billId, shareId }: { billId: string; shareId?: string }) => {
try {
// update bill
const res = await Bill.findByIdAndUpdate(billId, [
{
$set: {
total: {
$sum: '$list.amount'
},
time: new Date()
}
}
]);
if (!res) return;
const total = res.list.reduce((sum, item) => sum + item.amount, 0) || 0;
if (shareId) {
updateShareChatBill({
shareId,
total
});
}
console.log('finish bill:', formatPrice(total));
// 账号扣费
await User.findByIdAndUpdate(res.userId, {
await Promise.allSettled([
Bill.create({
userId,
appName,
appId,
total,
source,
list: response.map((item) => ({
moduleName: item.moduleName,
amount: item.price || 0,
model: item.model,
tokenLen: item.tokens
}))
}),
User.findByIdAndUpdate(userId, {
$inc: { balance: -total }
});
} catch (error) {
console.log('Finish bill failed:', error);
billId && Bill.findByIdAndDelete(billId);
}
};
}),
...(shareId
? [
updateShareChatBill({
shareId,
total
})
]
: [])
]);
export const delTaskBill = async (billId?: string) => {
if (!billId) return;
try {
await Bill.findByIdAndRemove(billId);
} catch (error) {}
console.log('finish bill:', formatPrice(total));
};
export const updateShareChatBill = async ({

View File

@@ -1,58 +1,31 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { adaptChatItem_openAI } from '@/utils/plugin/openai';
import { ChatContextFilter } from '@/service/utils/chat/index';
import type { ChatItemType } from '@/types/chat';
import { ChatRoleEnum } from '@/constants/chat';
import type { ChatHistoryItemResType, ChatItemType } from '@/types/chat';
import { ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/chat';
import { getOpenAIApi, axiosConfig } from '@/service/ai/openai';
import type { ClassifyQuestionAgentItemType } from '@/types/app';
import { countModelPrice, pushTaskBillListItem } from '@/service/events/pushBill';
import { getModel } from '@/service/utils/data';
import { authUser } from '@/service/utils/auth';
import { countModelPrice } from '@/service/events/pushBill';
export type Props = {
export type CQProps = {
systemPrompt?: string;
history?: ChatItemType[];
userChatInput: string;
agents: ClassifyQuestionAgentItemType[];
billId?: string;
};
export type Response = { history: ChatItemType[] };
export type CQResponse = {
[TaskResponseKeyEnum.responseData]: ChatHistoryItemResType;
[key: string]: any;
};
const moduleName = 'Classify Question';
const agentModel = 'gpt-3.5-turbo';
const agentFunName = 'agent_user_question';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await authUser({ req, authRoot: true });
let { userChatInput } = req.body as Props;
if (!userChatInput) {
throw new Error('userChatInput is empty');
}
const response = await classifyQuestion(req.body);
jsonRes(res, {
data: response
});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
}
const maxTokens = 2000;
/* request openai chat */
export async function classifyQuestion({
agents,
systemPrompt,
history = [],
userChatInput,
billId
}: Props) {
export const dispatchClassifyQuestion = async (props: Record<string, any>): Promise<CQResponse> => {
const { agents, systemPrompt, history = [], userChatInput } = props as CQProps;
const messages: ChatItemType[] = [
...(systemPrompt
? [
@@ -62,16 +35,16 @@ export async function classifyQuestion({
}
]
: []),
...history,
{
obj: ChatRoleEnum.Human,
value: userChatInput
}
];
const filterMessages = ChatContextFilter({
// @ts-ignore
model: agentModel,
prompts: messages,
maxTokens: 1500
maxTokens
});
const adaptMessages = adaptChatItem_openAI({ messages: filterMessages, reserveId: false });
@@ -112,27 +85,19 @@ export async function classifyQuestion({
throw new Error('');
}
const totalTokens = response.data.usage?.total_tokens || 0;
const tokens = response.data.usage?.total_tokens || 0;
await pushTaskBillListItem({
billId,
moduleName: 'Classify Question',
amount: countModelPrice({ model: agentModel, tokens: totalTokens }),
model: getModel(agentModel)?.name,
tokenLen: totalTokens
});
console.log(agents.map((item) => `${item.value},返回: '${item.key}'`).join(''), arg);
const result = agents.find((item) => item.key === arg.type);
if (result) {
return {
[arg.type]: 1
};
}
const result = agents.find((item) => item.key === arg.type) || agents[0];
return {
[agents[0].key]: 1
[result.key]: 1,
[TaskResponseKeyEnum.responseData]: {
moduleName,
price: countModelPrice({ model: agentModel, tokens }),
model: agentModel,
tokens,
cqList: agents,
cqResult: result.value
}
};
}
};

View File

@@ -1,89 +1,57 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes, sseErrRes } from '@/service/response';
import type { NextApiResponse } from 'next';
import { sseResponse } from '@/service/utils/tools';
import { OpenAiChatEnum } from '@/constants/model';
import { adaptChatItem_openAI, countOpenAIToken } from '@/utils/plugin/openai';
import { modelToolMap } from '@/utils/plugin';
import { ChatContextFilter } from '@/service/utils/chat/index';
import type { ChatItemType } from '@/types/chat';
import type { ChatItemType, QuoteItemType } from '@/types/chat';
import type { ChatHistoryItemResType } from '@/types/chat';
import { ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat';
import { parseStreamChunk, textAdaptGptResponse } from '@/utils/adapt';
import { getOpenAIApi, axiosConfig } from '@/service/ai/openai';
import { TaskResponseKeyEnum } from '@/constants/app';
import { TaskResponseKeyEnum } from '@/constants/chat';
import { getChatModel } from '@/service/utils/data';
import { countModelPrice, pushTaskBillListItem } from '@/service/events/pushBill';
import { authUser } from '@/service/utils/auth';
import { countModelPrice } from '@/service/events/pushBill';
export type Props = {
export type ChatProps = {
res: NextApiResponse;
model: `${OpenAiChatEnum}`;
temperature?: number;
maxToken?: number;
history?: ChatItemType[];
userChatInput: string;
stream?: boolean;
quotePrompt?: string;
quoteQA?: QuoteItemType[];
systemPrompt?: string;
limitPrompt?: string;
billId?: string;
};
export type Response = { [TaskResponseKeyEnum.answerText]: string; totalTokens: number };
export type ChatResponse = {
[TaskResponseKeyEnum.answerText]: string;
[TaskResponseKeyEnum.responseData]: ChatHistoryItemResType;
};
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
let { model, stream } = req.body as Props;
try {
await authUser({ req, authRoot: true });
const response = await chatCompletion({
...req.body,
res,
model
});
if (stream) {
sseResponse({
res,
event: sseResponseEventEnum.moduleFetchResponse,
data: JSON.stringify(response)
});
res.end();
} else {
jsonRes(res, {
data: response
});
}
} catch (err) {
if (stream) {
sseErrRes(res, err);
res.end();
} else {
jsonRes(res, {
code: 500,
error: err
});
}
}
}
const moduleName = 'AI Chat';
/* request openai chat */
export async function chatCompletion({
res,
model,
temperature = 0,
maxToken = 4000,
stream = false,
history = [],
quotePrompt = '',
userChatInput,
systemPrompt = '',
limitPrompt = '',
billId
}: Props & { res: NextApiResponse }): Promise<Response> {
export const dispatchChatCompletion = async (props: Record<string, any>): Promise<ChatResponse> => {
let {
res,
model,
temperature = 0,
maxToken = 4000,
stream = false,
history = [],
quoteQA = [],
userChatInput,
systemPrompt = '',
limitPrompt = ''
} = props as ChatProps;
// temperature adapt
const modelConstantsData = getChatModel(model);
if (!modelConstantsData) {
return Promise.reject('The chat model is undefined');
return Promise.reject('The chat model is undefined, you need to select a chat model.');
}
// FastGpt temperature range: 1~10
@@ -91,12 +59,19 @@ export async function chatCompletion({
const limitText = (() => {
if (limitPrompt) return limitPrompt;
if (quotePrompt && !limitPrompt) {
return '根据知识库内容回答问题,仅回复知识库提供的内容。';
if (quoteQA.length > 0 && !limitPrompt) {
return '根据知识库内容回答问题,仅回复知识库提供的内容,不要对知识库内容做补充说明。';
}
return '';
})();
const quotePrompt =
quoteQA.length > 0
? `下面是知识库内容:
${quoteQA.map((item, i) => `${i + 1}. [${item.q}\n${item.a}]`).join('\n')}
`
: '';
const messages: ChatItemType[] = [
...(quotePrompt
? [
@@ -138,6 +113,7 @@ export async function chatCompletion({
const adaptMessages = adaptChatItem_openAI({ messages: filterMessages, reserveId: false });
const chatAPI = getOpenAIApi();
// console.log(adaptMessages);
/* count response max token */
const promptsToken = modelToolMap.countTokens({
@@ -152,8 +128,8 @@ export async function chatCompletion({
temperature: Number(temperature || 0),
max_tokens: maxToken,
messages: adaptMessages,
// frequency_penalty: 0.5, // 越大,重复内容越少
// presence_penalty: -0.5, // 越大,越容易出现新内容
frequency_penalty: 0.5, // 越大,重复内容越少
presence_penalty: -0.5, // 越大,越容易出现新内容
stream
},
{
@@ -163,7 +139,7 @@ export async function chatCompletion({
}
);
const { answer, totalTokens } = await (async () => {
const { answerText, totalTokens, finishMessages } = await (async () => {
if (stream) {
// sse response
const { answer } = await streamResponse({ res, response });
@@ -174,38 +150,45 @@ export async function chatCompletion({
});
const totalTokens = countOpenAIToken({
messages: finishMessages,
model: 'gpt-3.5-turbo-16k'
messages: finishMessages
});
return {
answer,
totalTokens
answerText: answer,
totalTokens,
finishMessages
};
} else {
const answer = stream ? '' : response.data.choices?.[0].message?.content || '';
const totalTokens = stream ? 0 : response.data.usage?.total_tokens || 0;
const finishMessages = filterMessages.concat({
obj: ChatRoleEnum.AI,
value: answer
});
return {
answer,
totalTokens
answerText: answer,
totalTokens,
finishMessages
};
}
})();
await pushTaskBillListItem({
billId,
moduleName: 'AI Chat',
amount: countModelPrice({ model, tokens: totalTokens }),
model: modelConstantsData.name,
tokenLen: totalTokens
});
return {
answerText: answer,
totalTokens
[TaskResponseKeyEnum.answerText]: answerText,
[TaskResponseKeyEnum.responseData]: {
moduleName,
price: countModelPrice({ model, tokens: totalTokens }),
model: modelConstantsData.name,
tokens: totalTokens,
question: userChatInput,
answer: answerText,
maxToken,
finishMessages
}
};
}
};
async function streamResponse({ res, response }: { res: NextApiResponse; response: any }) {
let answer = '';

View File

@@ -0,0 +1,6 @@
/* Barrel file: aggregated re-exports of every app-module dispatch handler. */
export * from './init/history';
export * from './init/userChatInput';
export * from './chat/oneapi';
export * from './kb/search';
export * from './tools/answer';
export * from './agent/classifyQuestion';

View File

@@ -0,0 +1,15 @@
import { SystemInputEnum } from '@/constants/app';
import { ChatItemType } from '@/types/chat';
export type HistoryProps = {
maxContext: number;
[SystemInputEnum.history]: ChatItemType[];
};
/**
 * Trim the chat history to at most `maxContext` of the most recent messages
 * before it is handed to downstream modules.
 */
export const dispatchHistory = (props: Record<string, any>) => {
  const { maxContext = 5, history = [] } = props as HistoryProps;

  // `history.slice(-0)` is identical to `slice(0)` and would return the FULL
  // history; treat a non-positive maxContext as "no context at all".
  return {
    history: maxContext > 0 ? history.slice(-maxContext) : []
  };
};

View File

@@ -0,0 +1,12 @@
import { SystemInputEnum } from '@/constants/app';
export type UserChatInputProps = {
[SystemInputEnum.userChatInput]: string;
};
/**
 * Identity module: forwards the user's chat input unchanged so later
 * modules can consume it from the flow outputs.
 */
export const dispatchChatInput = (props: Record<string, any>) => ({
  userChatInput: (props as UserChatInputProps).userChatInput
});

View File

@@ -0,0 +1,76 @@
import { PgClient } from '@/service/pg';
import type { ChatHistoryItemResType, ChatItemType } from '@/types/chat';
import { TaskResponseKeyEnum } from '@/constants/chat';
import { getVector } from '@/pages/api/openapi/plugin/vector';
import { countModelPrice } from '@/service/events/pushBill';
import type { SelectedKbType } from '@/types/plugin';
import type { QuoteItemType } from '@/types/chat';
type KBSearchProps = {
kbList: SelectedKbType;
history: ChatItemType[];
similarity: number;
limit: number;
userChatInput: string;
};
export type KBSearchResponse = {
  // Billing / metadata record for this module run.
  [TaskResponseKeyEnum.responseData]: ChatHistoryItemResType;
  // Mutually exclusive flags: exactly one is set to true, the other stays
  // undefined (see the return in dispatchKBSearch). NOTE(review): presumably
  // consumed by the flow engine to branch on an empty result — confirm.
  isEmpty?: boolean;
  unEmpty?: boolean;
  // Matched QA records from the vector search.
  quoteQA: QuoteItemType[];
};
const moduleName = 'KB Search';
/**
 * Vector-search the selected knowledge bases for records similar to the
 * user's input. Embeds the input, runs a pgvector similarity query, and
 * returns the matched QA pairs plus billing metadata.
 * Rejects when no knowledge base is selected or the input is empty.
 */
export async function dispatchKBSearch(props: Record<string, any>): Promise<KBSearchResponse> {
  const {
    kbList = [],
    history = [], // NOTE(review): destructured but never used below — confirm whether it can be dropped
    similarity = 0.8,
    limit = 5,
    userChatInput
  } = props as KBSearchProps;

  if (kbList.length === 0) {
    return Promise.reject("You didn't choose the knowledge base");
  }

  if (!userChatInput) {
    return Promise.reject('Your input is empty');
  }

  // get vector — embed the raw user input with the first configured vector model
  const vectorModel = global.vectorModels[0];
  const { vectors, tokenLen } = await getVector({
    model: vectorModel.model,
    input: [userChatInput]
  });

  // search kb
  // NOTE(review): kbId, similarity and limit are interpolated directly into
  // the SQL string — safe only if they are validated upstream; consider a
  // parameterized query to rule out SQL injection. TODO confirm kbId values
  // are trusted (e.g. authenticated against the user's own KBs).
  const res: any = await PgClient.query(
    `BEGIN;
    SET LOCAL ivfflat.probes = ${global.systemEnv.pgIvfflatProbe || 10};
    select kb_id,id,q,a,source from modelData where kb_id IN (${kbList
      .map((item) => `'${item.kbId}'`)
      .join(',')}) AND vector <#> '[${vectors[0]}]' < -${similarity} order by vector <#> '[${
      vectors[0]
    }]' limit ${limit};
    COMMIT;`
  );

  // Third statement's result set (BEGIN / SET / SELECT / COMMIT).
  const searchRes: QuoteItemType[] = res?.[2]?.rows || [];

  return {
    // Exactly one of isEmpty/unEmpty is true; the other is left undefined.
    isEmpty: searchRes.length === 0 ? true : undefined,
    unEmpty: searchRes.length > 0 ? true : undefined,
    quoteQA: searchRes,
    responseData: {
      moduleName,
      price: countModelPrice({ model: vectorModel.model, tokens: tokenLen }),
      model: vectorModel.name,
      tokens: tokenLen,
      similarity,
      limit
    }
  };
}

View File

@@ -0,0 +1,31 @@
import { sseResponseEventEnum, TaskResponseKeyEnum } from '@/constants/chat';
import { sseResponse } from '@/service/utils/tools';
import { textAdaptGptResponse } from '@/utils/adapt';
import type { NextApiResponse } from 'next';
export type AnswerProps = {
res: NextApiResponse;
text: string;
stream: boolean;
};
export type AnswerResponse = {
[TaskResponseKeyEnum.answerText]: string;
};
/**
 * Emit a fixed answer text. When streaming, the text is also pushed to the
 * client as an SSE `answer` event; in all cases it is returned as the
 * module's answerText.
 */
export const dispatchAnswer = (props: Record<string, any>): AnswerResponse => {
  const { res, text = '', stream } = props as AnswerProps;

  // Unescape literal "\n" sequences exactly once so the streamed chunk and
  // the returned/stored answer are identical (previously only the SSE
  // payload was unescaped, leaving raw "\\n" in the saved answer).
  const formatText = text.replace(/\\n/g, '\n');

  if (stream) {
    sseResponse({
      res,
      event: sseResponseEventEnum.answer,
      data: textAdaptGptResponse({
        text: formatText
      })
    });
  }

  return {
    [TaskResponseKeyEnum.answerText]: formatText
  };
};

View File

@@ -5,7 +5,7 @@ import {
ModulesInputItemTypeEnum,
VariableInputEnum
} from '../constants/app';
import type { FlowInputItemType, FlowOutputItemType } from './flow';
import type { FlowInputItemType, FlowOutputItemType, FlowOutputTargetItemType } from './flow';
import type { AppSchema, kbSchema } from './mongoSchema';
import { ChatModelType } from '@/constants/model';
@@ -58,21 +58,12 @@ export type VariableItemType = {
};
/* app module */
export type AppModuleTemplateItemType = {
logo: string;
name: string;
description?: string;
intro: string;
flowType: `${FlowModuleTypeEnum}`;
type: `${AppModuleItemTypeEnum}`;
url?: string;
inputs: FlowInputItemType[];
outputs: FlowOutputItemType[];
};
export type AppModuleItemType = AppModuleTemplateItemType & {
export type AppModuleItemType = {
moduleId: string;
position?: XYPosition;
flowType: `${FlowModuleTypeEnum}`;
inputs: { key: string; value?: any; connected?: boolean }[];
outputs: { key: string; targets: FlowOutputTargetItemType[] }[];
};
export type AppItemType = {
@@ -83,8 +74,7 @@ export type AppItemType = {
export type RunningModuleItemType = {
moduleId: string;
type: `${AppModuleItemTypeEnum}`;
url?: string;
flowType: `${FlowModuleTypeEnum}`;
inputs: {
key: string;
value?: any;

View File

@@ -1,6 +1,7 @@
import { ChatRoleEnum, rawSearchKey } from '@/constants/chat';
import type { InitChatResponse, InitShareChatResponse } from '@/api/response/chat';
import { QuoteItemType } from '@/pages/api/openapi/kb/appKbSearch';
import { TaskResponseKeyEnum } from '@/constants/chat';
import { ClassifyQuestionAgentItemType } from './app';
export type ExportChatType = 'md' | 'pdf' | 'html';
@@ -37,3 +38,33 @@ export type ShareChatHistoryItemType = HistoryItemType & {
export type ShareChatType = InitShareChatResponse & {
history: ShareChatHistoryItemType;
};
// A single knowledge-base QA record as returned by the pg vector search
// (column names match the `modelData` table: kb_id, id, q, a, source).
export type QuoteItemType = {
  kb_id: string;
  id: string;
  q: string; // question text
  a: string; // answer text
  source?: string; // optional origin of the record
};
// Per-module response metadata collected while running a chat task; the
// common fields are used for billing, the rest are module-specific details.
export type ChatHistoryItemResType = {
  moduleName: string;
  price: number; // amount billed for this module's work
  model?: string;
  tokens?: number;
  // chat (AI Chat module)
  answer?: string;
  question?: string;
  temperature?: number;
  maxToken?: number;
  finishMessages?: ChatItemType[]; // full prompt + answer message list
  // kb search module
  similarity?: number;
  limit?: number;
  // cq (Classify Question module)
  cqList?: ClassifyQuestionAgentItemType[];
  cqResult?: string;
};

View File

@@ -5,6 +5,15 @@ import {
} from '@/constants/flow';
import { Connection } from 'reactflow';
import type { AppModuleItemType } from './app';
import { FlowModuleTypeEnum } from '@/constants/flow';
export type FlowModuleItemChangeProps = {
moduleId: string;
type?: 'inputs' | 'outputs';
key: string;
value: any;
valueKey?: keyof FlowInputItemType & keyof FlowBodyItemType;
};
export type FlowInputItemType = {
key: string; // 字段名
@@ -31,19 +40,21 @@ export type FlowOutputItemType = {
label: string;
description?: string;
type: `${FlowOutputItemTypeEnum}`;
response?: boolean;
targets: FlowOutputTargetItemType[];
};
export type FlowModuleItemChangeProps = {
moduleId: string;
type?: 'inputs' | 'outputs';
key: string;
value: any;
valueKey?: keyof FlowInputItemType & keyof FlowBodyItemType;
// Static template describing a flow module (logo, labels and its declared
// input/output sockets) as offered in the editor's module palette.
export type FlowModuleTemplateType = {
  logo: string;
  name: string;
  description?: string;
  intro: string;
  flowType: `${FlowModuleTypeEnum}`;
  url?: string; // NOTE(review): presumably an HTTP endpoint for remote modules — confirm
  inputs: FlowInputItemType[];
  outputs: FlowOutputItemType[];
};
export type FlowModuleItemType = AppModuleItemType & {
export type FlowModuleItemType = FlowModuleTemplateType & {
moduleId: string;
onChangeNode: (e: FlowModuleItemChangeProps) => void;
onDelNode: (id: string) => void;
};

View File

@@ -6,11 +6,11 @@ import { ChatCompletionRequestMessageRoleEnum } from 'openai';
import { ChatRoleEnum } from '@/constants/chat';
import type { MessageItemType } from '@/pages/api/openapi/v1/chat/completions';
import type { AppModuleItemType } from '@/types/app';
import type { FlowModuleItemType } from '@/types/flow';
import type { FlowModuleItemType, FlowModuleTemplateType } from '@/types/flow';
import type { Edge, Node } from 'reactflow';
import { connectionLineStyle } from '@/constants/flow';
import { customAlphabet } from 'nanoid';
import { ModuleTemplates } from '@/constants/flow/ModuleTemplate';
import { EmptyModule, ModuleTemplates, ModuleTemplatesFlat } from '@/constants/flow/ModuleTemplate';
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 6);
export const adaptBill = (bill: BillSchema): UserBillType => {
@@ -92,48 +92,41 @@ export const appModule2FlowNode = ({
}): Node<FlowModuleItemType> => {
// init some static data
const template =
ModuleTemplates.map((templates) => templates.list)
?.flat()
.find((template) => template.flowType === item.flowType) || item;
ModuleTemplatesFlat.find((template) => template.flowType === item.flowType) || EmptyModule;
// replace item data
const moduleItem = {
const moduleItem: FlowModuleItemType = {
...item,
logo: template.logo,
name: template.name,
intro: template.intro,
type: template.type,
url: template.url,
inputs: template.inputs.map((templateInput) => {
// use latest inputs
const itemInput = item.inputs.find((item) => item.key === templateInput.key) || templateInput;
return {
...templateInput,
key: itemInput.key,
value: itemInput.value
};
}),
outputs: item.outputs.map((itemOutput) => {
outputs: template.outputs.map((templateOutput) => {
// unChange outputs
const templateOutput =
template.outputs.find((item) => item.key === itemOutput.key) || itemOutput;
const itemOutput =
item.outputs.find((item) => item.key === templateOutput.key) || templateOutput;
return {
...templateOutput,
key: itemOutput.key,
targets: itemOutput.targets || []
};
})
}),
onChangeNode,
onDelNode
};
return {
id: item.moduleId,
type: item.flowType,
data: {
...moduleItem,
onChangeNode,
onDelNode
},
data: moduleItem,
position: item.position || { x: 0, y: 0 }
};
};