fix: sse headers and extract module

commit 5f5d439f55 (parent b472127d3b)
Author: archer
Date: 2023-07-30 12:26:21 +08:00
10 changed files with 112 additions and 51 deletions

Binary file not shown (image added; 1.2 KiB).

@@ -42,7 +42,7 @@ const MyModal = ({
{...props}
>
{!!title && <ModalHeader>{title}</ModalHeader>}
- <Box overflow={'overlay'}>
+ <Box overflow={'overlay'} h={'100%'}>
{showCloseBtn && <ModalCloseButton />}
{children}
</Box>


@@ -20,7 +20,7 @@ const MyTooltip = ({ children, forceShow = false, ...props }: Props) => {
py={2}
borderRadius={'8px'}
whiteSpace={'pre-wrap'}
- shouldWrapChildren
+ boxShadow={'1px 1px 10px rgba(0,0,0,0.2)'}
{...props}
>
{children}


@@ -60,7 +60,6 @@ export const UserInputModule: FlowModuleTemplateType = {
name: 'User question (chat entry)',
intro: 'What the user typed. This module is usually the entry point of an app; it runs first after the user sends a message.',
flowType: FlowModuleTypeEnum.questionInput,
- url: '/app/modules/init/userChatInput',
inputs: [
{
key: SystemInputEnum.userChatInput,
@@ -83,7 +82,6 @@ export const HistoryModule: FlowModuleTemplateType = {
name: 'Chat history',
intro: 'What the user typed. This module is usually the entry point of an app; it runs first after the user sends a message.',
flowType: FlowModuleTypeEnum.historyNode,
- url: '/app/modules/init/history',
inputs: [
{
key: 'maxContext',
@@ -116,7 +114,6 @@ export const ChatModule: FlowModuleTemplateType = {
name: 'AI chat',
intro: 'AI large language model chat',
flowType: FlowModuleTypeEnum.chatNode,
- url: '/app/modules/chat/gpt',
inputs: [
{
key: 'model',
@@ -206,7 +203,6 @@ export const KBSearchModule: FlowModuleTemplateType = {
name: 'Knowledge base search',
intro: 'Search the knowledge base for matching answers; the results can be cited as references in AI chat.',
flowType: FlowModuleTypeEnum.kbSearchNode,
- url: '/app/modules/kb/search',
inputs: [
{
key: 'kbList',
@@ -322,7 +318,6 @@ export const ClassifyQuestionModule: FlowModuleTemplateType = {
intro: 'Determines which category the user question belongs to, so that different actions can be taken.',
description:
'Classify the question type based on the chat history and the current question. Multiple question types can be added. Below is a template example:\nType 1: Greeting\nType 2: General questions about laf\nType 3: Questions about laf code\nType 4: Other questions',
- url: '/app/modules/agent/classifyQuestion',
flowType: FlowModuleTypeEnum.classifyQuestion,
inputs: [
{
@@ -378,6 +373,65 @@ export const ClassifyQuestionModule: FlowModuleTemplateType = {
}
]
};
+ export const ContextExtractModule: FlowModuleTemplateType = {
+ logo: '/imgs/module/extract.png',
+ name: 'Content extraction',
+ intro: 'Extract data in a specified format from text',
+ description: 'Extract specified data from text, e.g. SQL statements, search keywords, code',
+ flowType: FlowModuleTypeEnum.contentExtract,
+ inputs: [
+ {
+ key: 'systemPrompt',
+ type: FlowInputItemTypeEnum.textarea,
+ valueType: FlowValueTypeEnum.string,
+ label: 'Extraction description',
+ description: 'Write the extraction requirements to tell the AI what it should extract',
+ placeholder: 'e.g.: \n1. According to the user\n2. Sealos is a cluster operating system',
+ value: ''
+ },
+ Input_Template_History,
+ Input_Template_UserChatInput,
+ {
+ key: 'agents',
+ type: FlowInputItemTypeEnum.custom,
+ label: '',
+ value: [
+ {
+ value: 'Greeting',
+ key: 'fasw'
+ },
+ {
+ value: 'Questions about xxx',
+ key: 'fqsw'
+ },
+ {
+ value: 'Other questions',
+ key: 'fesw'
+ }
+ ]
+ }
+ ],
+ outputs: [
+ {
+ key: 'fasw',
+ label: '',
+ type: FlowOutputItemTypeEnum.hidden,
+ targets: []
+ },
+ {
+ key: 'fqsw',
+ label: '',
+ type: FlowOutputItemTypeEnum.hidden,
+ targets: []
+ },
+ {
+ key: 'fesw',
+ label: '',
+ type: FlowOutputItemTypeEnum.hidden,
+ targets: []
+ }
+ ]
+ };
export const EmptyModule: FlowModuleTemplateType = {
logo: '/imgs/module/cq.png',
name: 'This module has been removed',
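
Worth noting in the ContextExtractModule template added above: every entry in the agents input has a hidden output with the identical key (fasw, fqsw, fesw), which is how each extracted field gets routed to downstream modules. A minimal sketch of that pairing (the helper name is hypothetical, not part of this commit):

// Hypothetical helper: derive one hidden output per extraction target,
// keyed identically so the flow editor can wire each field onward.
// FlowOutputItemTypeEnum is the same enum used in the template above.
const agentsToOutputs = (agents: { key: string; value: string }[]) =>
  agents.map(({ key }) => ({
    key,
    label: '',
    type: FlowOutputItemTypeEnum.hidden,
    targets: []
  }));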


@@ -30,7 +30,8 @@ export enum FlowModuleTypeEnum {
kbSearchNode = 'kbSearchNode',
tfSwitchNode = 'tfSwitchNode',
answerNode = 'answerNode',
- classifyQuestion = 'classifyQuestion'
+ classifyQuestion = 'classifyQuestion',
+ contentExtract = 'contentExtract'
}
export enum SpecialInputKeyEnum {
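
Adding contentExtract to FlowModuleTypeEnum only registers the type; it takes effect where flow modules are dispatched to their handlers, which this diff does not show. A hedged sketch of that pattern (the map and the contentExtract handler name are assumptions; dispatchClassifyQuestion does appear elsewhere in this commit):

// Hypothetical dispatch table keyed by flowType.
const moduleDispatch: Record<string, (props: Record<string, any>) => Promise<any>> = {
  [FlowModuleTypeEnum.classifyQuestion]: dispatchClassifyQuestion,
  [FlowModuleTypeEnum.contentExtract]: dispatchContentExtract // assumed name
};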


@@ -199,16 +199,6 @@ const Select = selectMultiStyle({
}
});
- const Tooltip = defineStyleConfig({
- baseStyle: {
- p: 3,
- bg: 'white',
- color: 'blackAlpha.800',
- borderRadius: '8px',
- boxShadow: '1px 1px 10px rgba(0,0,0,0.2)'
- }
- });
// global theme
export const theme = extendTheme({
styles: {
@@ -309,7 +299,6 @@ export const theme = extendTheme({
Textarea,
Switch,
Select,
- Tooltip,
NumberInput
}
});


@@ -113,6 +113,12 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
throw new Error('Question is empty');
}
+ // create the response stream
+ res.setHeader('Content-Type', 'text/event-stream;charset=utf-8');
+ res.setHeader('Access-Control-Allow-Origin', '*');
+ res.setHeader('X-Accel-Buffering', 'no');
+ res.setHeader('Cache-Control', 'no-cache, no-transform');
/* start process */
const { responseData, answerText } = await dispatchModules({
res,
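
This is the standard header set for server-sent events behind nginx: text/event-stream marks the response as a stream, X-Accel-Buffering: no keeps nginx from buffering it, and no-cache, no-transform stops proxies from batching or rewriting chunks. A minimal sketch of writing to such a stream (the helper is illustrative; the actual streaming happens inside dispatchModules):

import type { NextApiResponse } from 'next';

// Illustrative only: each SSE message is a "data:" line followed by a
// blank line; prompt delivery relies on the headers set above.
function sseWrite(res: NextApiResponse, data: string) {
  res.write(`data: ${JSON.stringify(data)}\n\n`);
}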


@@ -7,13 +7,14 @@ import type { ClassifyQuestionAgentItemType } from '@/types/app';
import { countModelPrice } from '@/service/events/pushBill';
import { UserModelSchema } from '@/types/mongoSchema';
import { getModel } from '@/service/utils/data';
+ import { SystemInputEnum } from '@/constants/app';
export type CQProps = {
systemPrompt?: string;
history?: ChatItemType[];
- userChatInput: string;
- agents: ClassifyQuestionAgentItemType[];
+ [SystemInputEnum.userChatInput]: string;
userOpenaiAccount: UserModelSchema['openaiAccount'];
+ agents: ClassifyQuestionAgentItemType[];
};
export type CQResponse = {
[TaskResponseKeyEnum.responseData]: ChatHistoryItemResType;
@@ -22,7 +23,7 @@ export type CQResponse = {
const agentModel = 'gpt-3.5-turbo';
const agentFunName = 'agent_user_question';
- const maxTokens = 2000;
+ const maxTokens = 3000;
/* request openai chat */
export const dispatchClassifyQuestion = async (props: Record<string, any>): Promise<CQResponse> => {


@@ -7,40 +7,41 @@ import type { ChatItemType } from '@/types/chat';
import { ChatRoleEnum } from '@/constants/chat';
import { getAIChatApi, axiosConfig } from '@/service/ai/openai';
import type { ClassifyQuestionAgentItemType } from '@/types/app';
- import { authUser } from '@/service/utils/auth';
+ import { SystemInputEnum } from '@/constants/app';
export type Props = {
systemPrompt?: string;
history?: ChatItemType[];
- userChatInput: string;
- agents: ClassifyQuestionAgentItemType[];
+ [SystemInputEnum.userChatInput]: string;
description: string;
+ agents: ClassifyQuestionAgentItemType[];
};
- export type Response = { history: ChatItemType[] };
+ export type Response = {
+ arguments: Record<string, any>;
+ deficiency: boolean;
+ };
- const agentModel = 'gpt-3.5-turbo-16k';
+ const agentModel = 'gpt-3.5-turbo';
const agentFunName = 'agent_extract_data';
+ const maxTokens = 3000;
- export default async function handler(req: NextApiRequest, res: NextApiResponse) {
- try {
- await authUser({ req, authRoot: true });
- const response = await extract(req.body);
- jsonRes(res, {
- data: response
- });
- } catch (err) {
- jsonRes(res, {
- code: 500,
- error: err
- });
- }
- }
/* request openai chat */
- export async function extract({ agents, history = [], userChatInput, description }: Props) {
+ export async function extract({
+ systemPrompt,
+ agents,
+ history = [],
+ userChatInput,
+ description
+ }: Props): Promise<Response> {
const messages: ChatItemType[] = [
- ...history.slice(-4),
+ ...(systemPrompt
+ ? [
+ {
+ obj: ChatRoleEnum.System,
+ value: systemPrompt
+ }
+ ]
+ : []),
+ ...history,
{
obj: ChatRoleEnum.Human,
value: userChatInput
@@ -50,7 +51,7 @@ export async function extract({ agents, history = [], userChatInput, description
// @ts-ignore
model: agentModel,
prompts: messages,
- maxTokens: 3000
+ maxTokens
});
const adaptMessages = adaptChatItem_openAI({ messages: filterMessages, reserveId: false });
@@ -94,7 +95,17 @@ export async function extract({ agents, history = [], userChatInput, description
}
);
- const arg = JSON.parse(response.data.choices?.[0]?.message?.function_call?.arguments || '');
+ const arg = JSON.parse(response.data.choices?.[0]?.message?.function_call?.arguments || '{}');
+ let deficiency = false;
+ for (const key in arg) {
+ if (arg[key] === '') {
+ deficiency = true;
+ break;
+ }
+ }
- return arg;
+ return {
+ arguments: arg,
+ deficiency
+ };
}
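
A hypothetical call site for the reworked extract, showing the new { arguments, deficiency } return shape (the field keys and inputs here are made up for illustration):

const { arguments: fields, deficiency } = await extract({
  description: 'Extract the order id and the user email from the conversation',
  agents: [
    { value: 'order id', key: 'orderId' },
    { value: 'user email', key: 'email' }
  ],
  history: [],
  userChatInput: 'My order 10086 has not arrived yet'
});
// deficiency === true means at least one requested field came back empty,
// so the caller can ask a follow-up question instead of proceeding.
if (!deficiency) {
  console.log(fields.orderId, fields.email);
}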


@@ -52,7 +52,6 @@ export type FlowModuleTemplateType = {
description?: string;
intro: string;
flowType: `${FlowModuleTypeEnum}`;
- url?: string;
inputs: FlowInputItemType[];
outputs: FlowOutputItemType[];
};