archer
2023-07-17 17:39:36 +08:00
parent 53a4d9db05
commit dc1599ba3c
21 changed files with 762 additions and 54 deletions

View File

@@ -4,7 +4,7 @@ import type { InitChatResponse, InitShareChatResponse } from './response/chat';
import { RequestPaging } from '../types/index';
import type { ShareChatSchema } from '@/types/mongoSchema';
import type { ShareChatEditType } from '@/types/app';
import type { QuoteItemType } from '@/pages/api/openapi/modules/kb/search';
import type { QuoteItemType } from '@/pages/api/app/modules/kb/search';
import type { Props as UpdateHistoryProps } from '@/pages/api/chat/history/updateChatHistory';
/**

View File

@@ -1,7 +1,7 @@
import { sseResponseEventEnum } from '@/constants/chat';
import { getErrText } from '@/utils/tools';
import { parseStreamChunk } from '@/utils/adapt';
import { QuoteItemType } from '@/pages/api/openapi/modules/kb/search';
import { QuoteItemType } from '@/pages/api/app/modules/kb/search';
import { rawSearchKey } from '@/constants/chat';
interface StreamFetchProps {

View File

@@ -17,7 +17,7 @@ import { useQuery } from '@tanstack/react-query';
import { getHistoryQuote, updateHistoryQuote } from '@/api/chat';
import { useToast } from '@/hooks/useToast';
import { getErrText } from '@/utils/tools';
import { QuoteItemType } from '@/pages/api/openapi/modules/kb/search';
import { QuoteItemType } from '@/pages/api/app/modules/kb/search';
const QuoteModal = ({
historyId,

View File

@@ -34,7 +34,7 @@ import dynamic from 'next/dynamic';
const QuoteModal = dynamic(() => import('./QuoteModal'));
import styles from './index.module.scss';
import { QuoteItemType } from '@/pages/api/openapi/modules/kb/search';
import { QuoteItemType } from '@/pages/api/app/modules/kb/search';
const textareaMinH = '22px';
export type StartChatFnProps = {

View File

@@ -38,7 +38,7 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
intro: '用户输入的内容。该模块通常作为应用的入口,用户在发送消息后会首先执行该模块。',
type: 'initInput',
flowType: 'questionInput',
url: '/openapi/modules/init/userChatInput',
url: '/app/modules/init/userChatInput',
inputs: [
{
key: 'userChatInput',
@@ -72,7 +72,7 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
intro: 'AI 大模型对话',
flowType: 'chatNode',
type: 'http',
url: '/openapi/modules/chat/gpt',
url: '/app/modules/chat/gpt',
inputs: [
{
key: 'model',
@@ -203,7 +203,7 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
intro: '用户输入的内容。该模块通常作为应用的入口,用户在发送消息后会首先执行该模块。',
type: 'initInput',
flowType: 'historyNode',
url: '/openapi/modules/init/history',
url: '/app/modules/init/history',
inputs: [
{
key: 'maxContext',
@@ -254,7 +254,7 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
intro: '用户输入的内容。该模块通常作为应用的入口,用户在发送消息后会首先执行该模块。',
type: 'initInput',
flowType: 'questionInput',
url: '/openapi/modules/init/userChatInput',
url: '/app/modules/init/userChatInput',
inputs: [
{
key: 'userChatInput',
@@ -292,7 +292,7 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
intro: '用户输入的内容。该模块通常作为应用的入口,用户在发送消息后会首先执行该模块。',
type: 'initInput',
flowType: 'historyNode',
url: '/openapi/modules/init/history',
url: '/app/modules/init/history',
inputs: [
{
key: 'maxContext',
@@ -335,7 +335,7 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
intro: 'AI 大模型对话',
flowType: 'chatNode',
type: 'http',
url: '/openapi/modules/chat/gpt',
url: '/app/modules/chat/gpt',
inputs: [
{
key: 'model',
@@ -466,7 +466,7 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
intro: '去知识库中搜索对应的答案。可作为 AI 对话引用参考。',
flowType: 'kbSearchNode',
type: 'http',
url: '/openapi/modules/kb/search',
url: '/app/modules/kb/search',
inputs: [
{
key: 'kb_ids',
@@ -609,7 +609,7 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
intro: '用户输入的内容。该模块通常作为应用的入口,用户在发送消息后会首先执行该模块。',
type: 'initInput',
flowType: 'questionInput',
url: '/openapi/modules/init/userChatInput',
url: '/app/modules/init/userChatInput',
inputs: [
{
key: 'userChatInput',
@@ -643,7 +643,7 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
intro: 'OpenAI GPT 大模型对话。',
flowType: 'chatNode',
type: 'http',
url: '/openapi/modules/chat/gpt',
url: '/app/modules/chat/gpt',
inputs: [
{
key: 'model',
@@ -774,7 +774,7 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
intro: '用户输入的内容。该模块通常作为应用的入口,用户在发送消息后会首先执行该模块。',
type: 'initInput',
flowType: 'historyNode',
url: '/openapi/modules/init/history',
url: '/app/modules/init/history',
inputs: [
{
key: 'maxContext',
@@ -876,7 +876,7 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
{
id: 'CQ',
avatar: '/imgs/module/cq.png',
name: '意图识别 + 知识库',
name: '问题分类 + 知识库',
intro: '先对用户的问题进行分类,再根据不同类型问题,执行不同的操作',
modules: [
{
@@ -885,7 +885,7 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
intro: '用户输入的内容。该模块通常作为应用的入口,用户在发送消息后会首先执行该模块。',
type: 'initInput',
flowType: 'questionInput',
url: '/openapi/modules/init/userChatInput',
url: '/app/modules/init/userChatInput',
inputs: [
{
key: 'userChatInput',
@@ -927,7 +927,7 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
intro: '用户输入的内容。该模块通常作为应用的入口,用户在发送消息后会首先执行该模块。',
type: 'initInput',
flowType: 'historyNode',
url: '/openapi/modules/init/history',
url: '/app/modules/init/history',
inputs: [
{
key: 'maxContext',
@@ -970,7 +970,7 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
intro: 'OpenAI GPT 大模型对话。',
flowType: 'chatNode',
type: 'http',
url: '/openapi/modules/chat/gpt',
url: '/app/modules/chat/gpt',
inputs: [
{
key: 'model',
@@ -1101,7 +1101,7 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
intro: '去知识库中搜索对应的答案。可作为 AI 对话引用参考。',
flowType: 'kbSearchNode',
type: 'http',
url: '/openapi/modules/kb/search',
url: '/app/modules/kb/search',
inputs: [
{
key: 'kb_ids',
@@ -1208,7 +1208,7 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
intro: '用户输入的内容。该模块通常作为应用的入口,用户在发送消息后会首先执行该模块。',
type: 'initInput',
flowType: 'historyNode',
url: '/openapi/modules/init/history',
url: '/app/modules/init/history',
inputs: [
{
key: 'maxContext',
@@ -1303,11 +1303,11 @@ export const appTemplates: (AppItemType & { avatar: string; intro: string })[] =
},
{
logo: '/imgs/module/cq.png',
name: '意图识别',
name: '问题分类',
intro: '可以判断用户问题属于哪方面问题,从而执行不同的操作。',
type: 'http',
url: '/openapi/modules/agent/recognizeIntention',
flowType: 'recognizeIntention',
url: '/app/modules/agent/classifyQuestion',
flowType: 'classifyQuestion',
inputs: [
{
key: 'systemPrompt',

View File

@@ -36,7 +36,7 @@ export const UserInputModule: AppModuleTemplateItemType = {
intro: '用户输入的内容。该模块通常作为应用的入口,用户在发送消息后会首先执行该模块。',
type: AppModuleItemTypeEnum.initInput,
flowType: FlowModuleTypeEnum.questionInputNode,
url: '/openapi/modules/init/userChatInput',
url: '/app/modules/init/userChatInput',
inputs: [
{
key: SystemInputEnum.userChatInput,
@@ -59,7 +59,7 @@ export const HistoryModule: AppModuleTemplateItemType = {
intro: '用户输入的内容。该模块通常作为应用的入口,用户在发送消息后会首先执行该模块。',
type: AppModuleItemTypeEnum.initInput,
flowType: FlowModuleTypeEnum.historyNode,
url: '/openapi/modules/init/history',
url: '/app/modules/init/history',
inputs: [
{
key: 'maxContext',
@@ -92,7 +92,7 @@ export const ChatModule: AppModuleTemplateItemType = {
intro: 'AI 大模型对话',
flowType: FlowModuleTypeEnum.chatNode,
type: AppModuleItemTypeEnum.http,
url: '/openapi/modules/chat/gpt',
url: '/app/modules/chat/gpt',
inputs: [
{
key: 'model',
@@ -176,7 +176,7 @@ export const KBSearchModule: AppModuleTemplateItemType = {
intro: '去知识库中搜索对应的答案。可作为 AI 对话引用参考。',
flowType: FlowModuleTypeEnum.kbSearchNode,
type: AppModuleItemTypeEnum.http,
url: '/openapi/modules/kb/search',
url: '/app/modules/kb/search',
inputs: [
{
key: 'kb_ids',
@@ -283,13 +283,13 @@ export const TFSwitchModule: AppModuleTemplateItemType = {
}
]
};
export const RecognizeIntentionModule: AppModuleTemplateItemType = {
export const ClassifyQuestionModule: AppModuleTemplateItemType = {
logo: '/imgs/module/cq.png',
name: '意图识别',
name: '问题分类',
intro: '可以判断用户问题属于哪方面问题,从而执行不同的操作。',
type: AppModuleItemTypeEnum.http,
url: '/openapi/modules/agent/recognizeIntention',
flowType: FlowModuleTypeEnum.recognizeIntention,
url: '/app/modules/agent/classifyQuestion',
flowType: FlowModuleTypeEnum.classifyQuestion,
inputs: [
{
key: 'systemPrompt',
@@ -353,6 +353,6 @@ export const ModuleTemplates = [
},
{
label: 'Agent',
list: [RecognizeIntentionModule]
list: [ClassifyQuestionModule]
}
];

View File

@@ -26,7 +26,7 @@ export enum FlowModuleTypeEnum {
kbSearchNode = 'kbSearchNode',
tfSwitchNode = 'tfSwitchNode',
answerNode = 'answerNode',
recognizeIntention = 'recognizeIntention'
classifyQuestion = 'classifyQuestion'
}
export const edgeOptions = {

View File

@@ -2,22 +2,665 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser } from '@/service/utils/auth';
import { connectToDatabase, TrainingData, User, promotionRecord } from '@/service/mongo';
import { TrainingModeEnum } from '@/constants/plugin';
import mongoose from 'mongoose';
import { connectToDatabase, App } from '@/service/mongo';
import { appTemplates } from '@/constants/app';
import { rawSearchKey } from '@/constants/chat';
const chatTemplate = ({
model,
temperature,
maxToken,
systemPrompt,
limitPrompt
}: {
model: string;
temperature: number;
maxToken: number;
systemPrompt: string;
limitPrompt: string;
}) => {
return [
{
logo: '/imgs/module/userChatInput.png',
name: '用户问题',
intro: '用户输入的内容。该模块通常作为应用的入口,用户在发送消息后会首先执行该模块。',
type: 'initInput',
flowType: 'questionInput',
url: '/app/modules/init/userChatInput',
inputs: [
{
key: 'userChatInput',
type: 'systemInput',
label: '用户问题',
connected: false
}
],
outputs: [
{
key: 'userChatInput',
label: '用户问题',
type: 'source',
targets: [
{
moduleId: '7pacf0',
key: 'userChatInput'
}
]
}
],
position: {
x: 477.9074315528994,
y: 1604.2106242223683
},
moduleId: '7z5g5h'
},
{
logo: '/imgs/module/AI.png',
name: 'AI 对话',
intro: 'AI 大模型对话',
flowType: 'chatNode',
type: 'http',
url: '/app/modules/chat/gpt',
inputs: [
{
key: 'model',
type: 'custom',
label: '对话模型',
value: model,
list: [
{
label: 'Gpt35-4k',
value: 'gpt-3.5-turbo'
},
{
label: 'Gpt35-16k',
value: 'gpt-3.5-turbo-16k'
},
{
label: 'Gpt4',
value: 'gpt-4'
}
],
connected: false
},
{
key: 'temperature',
type: 'slider',
label: '温度',
value: temperature,
min: 0,
max: 10,
step: 1,
markList: [
{
label: '严谨',
value: 0
},
{
label: '发散',
value: 10
}
],
connected: false
},
{
key: 'maxToken',
type: 'slider',
label: '回复上限',
value: maxToken,
min: 0,
max: 16000,
step: 50,
markList: [
{
label: '0',
value: 0
},
{
label: '16000',
value: 16000
}
],
connected: false
},
{
key: 'systemPrompt',
type: 'textarea',
label: '系统提示词',
description:
'模型固定的引导词,通过调整该内容,可以引导模型聊天方向。该内容会被固定在上下文的开头。',
placeholder:
'模型固定的引导词,通过调整该内容,可以引导模型聊天方向。该内容会被固定在上下文的开头。',
value: systemPrompt,
connected: false
},
{
key: 'limitPrompt',
type: 'textarea',
label: '限定词',
description:
'限定模型对话范围,会被放置在本次提问前,拥有强引导和限定性。例如:\n1. 知识库是关于 Laf 的介绍,参考知识库回答问题,与 "Laf" 无关内容,直接回复: "我不知道"。\n2. 你仅回答关于 "xxx" 的问题,其他问题回复: "xxxx"',
placeholder:
'限定模型对话范围,会被放置在本次提问前,拥有强引导和限定性。例如:\n1. 知识库是关于 Laf 的介绍,参考知识库回答问题,与 "Laf" 无关内容,直接回复: "我不知道"。\n2. 你仅回答关于 "xxx" 的问题,其他问题回复: "xxxx"',
value: limitPrompt,
connected: false
},
{
key: 'switch',
type: 'target',
label: '触发器',
connected: false
},
{
key: 'quotePrompt',
type: 'target',
label: '引用内容',
connected: false
},
{
key: 'history',
type: 'target',
label: '聊天记录',
connected: true
},
{
key: 'userChatInput',
type: 'target',
label: '用户问题',
connected: true
}
],
outputs: [
{
key: 'answerText',
label: '模型回复',
description: '直接响应,无需配置',
type: 'hidden',
targets: []
}
],
position: {
x: 981.9682828103937,
y: 890.014595014464
},
moduleId: '7pacf0'
},
{
logo: '/imgs/module/history.png',
name: '聊天记录',
intro: '用户输入的内容。该模块通常作为应用的入口,用户在发送消息后会首先执行该模块。',
type: 'initInput',
flowType: 'historyNode',
url: '/app/modules/init/history',
inputs: [
{
key: 'maxContext',
type: 'numberInput',
label: '最长记录数',
value: 4,
min: 0,
max: 50,
connected: false
},
{
key: 'history',
type: 'hidden',
label: '聊天记录',
connected: false
}
],
outputs: [
{
key: 'history',
label: '聊天记录',
type: 'source',
targets: [
{
moduleId: '7pacf0',
key: 'history'
}
]
}
],
position: {
x: 452.5466249541586,
y: 1276.3930310334215
},
moduleId: 'xj0c9p'
}
];
};
const kbTemplate = ({
model,
temperature,
maxToken,
systemPrompt,
limitPrompt,
kbs = [],
searchSimilarity,
searchLimit,
searchEmptyText
}: {
model: string;
temperature: number;
maxToken: number;
systemPrompt: string;
limitPrompt: string;
kbs: string[];
searchSimilarity: number;
searchLimit: number;
searchEmptyText: string;
}) => {
return [
{
logo: '/imgs/module/userChatInput.png',
name: '用户问题',
intro: '用户输入的内容。该模块通常作为应用的入口,用户在发送消息后会首先执行该模块。',
type: 'initInput',
flowType: 'questionInput',
url: '/app/modules/init/userChatInput',
inputs: [
{
key: 'userChatInput',
type: 'systemInput',
label: '用户问题',
connected: false
}
],
outputs: [
{
key: 'userChatInput',
label: '用户问题',
type: 'source',
targets: [
{
moduleId: 'q9v14m',
key: 'userChatInput'
},
{
moduleId: 'qbf8td',
key: 'userChatInput'
}
]
}
],
position: {
x: -210.24817109253843,
y: 665.7922967022607
},
moduleId: 'v0nc1s'
},
{
logo: '/imgs/module/history.png',
name: '聊天记录',
intro: '用户输入的内容。该模块通常作为应用的入口,用户在发送消息后会首先执行该模块。',
type: 'initInput',
flowType: 'historyNode',
url: '/app/modules/init/history',
inputs: [
{
key: 'maxContext',
type: 'numberInput',
label: '最长记录数',
value: 4,
min: 0,
max: 50,
connected: false
},
{
key: 'history',
type: 'hidden',
label: '聊天记录',
connected: false
}
],
outputs: [
{
key: 'history',
label: '聊天记录',
type: 'source',
targets: [
{
moduleId: 'qbf8td',
key: 'history'
}
]
}
],
position: {
x: 211.58250540918442,
y: 611.8700401034965
},
moduleId: 'k9y3jm'
},
{
logo: '/imgs/module/AI.png',
name: 'AI 对话',
intro: 'AI 大模型对话',
flowType: 'chatNode',
type: 'http',
url: '/app/modules/chat/gpt',
inputs: [
{
key: 'model',
type: 'custom',
label: '对话模型',
value: model,
list: [
{
label: 'Gpt35-4k',
value: 'gpt-3.5-turbo'
},
{
label: 'Gpt35-16k',
value: 'gpt-3.5-turbo-16k'
},
{
label: 'Gpt4',
value: 'gpt-4'
}
],
connected: false
},
{
key: 'temperature',
type: 'slider',
label: '温度',
value: temperature,
min: 0,
max: 10,
step: 1,
markList: [
{
label: '严谨',
value: 0
},
{
label: '发散',
value: 10
}
],
connected: false
},
{
key: 'maxToken',
type: 'slider',
label: '回复上限',
value: maxToken,
min: 0,
max: 16000,
step: 50,
markList: [
{
label: '0',
value: 0
},
{
label: '16000',
value: 16000
}
],
connected: false
},
{
key: 'systemPrompt',
type: 'textarea',
label: '系统提示词',
description:
'模型固定的引导词,通过调整该内容,可以引导模型聊天方向。该内容会被固定在上下文的开头。',
placeholder:
'模型固定的引导词,通过调整该内容,可以引导模型聊天方向。该内容会被固定在上下文的开头。',
value: systemPrompt,
connected: false
},
{
key: 'limitPrompt',
type: 'textarea',
label: '限定词',
description:
'限定模型对话范围,会被放置在本次提问前,拥有强引导和限定性。例如:\n1. 知识库是关于 Laf 的介绍,参考知识库回答问题,与 "Laf" 无关内容,直接回复: "我不知道"。\n2. 你仅回答关于 "xxx" 的问题,其他问题回复: "xxxx"',
placeholder:
'限定模型对话范围,会被放置在本次提问前,拥有强引导和限定性。例如:\n1. 知识库是关于 Laf 的介绍,参考知识库回答问题,与 "Laf" 无关内容,直接回复: "我不知道"。\n2. 你仅回答关于 "xxx" 的问题,其他问题回复: "xxxx"',
value: limitPrompt,
connected: false
},
{
key: 'switch',
type: 'target',
label: '触发器',
connected: true
},
{
key: 'quotePrompt',
type: 'target',
label: '引用内容',
connected: true
},
{
key: 'history',
type: 'target',
label: '聊天记录',
connected: true
},
{
key: 'userChatInput',
type: 'target',
label: '用户问题',
connected: true
}
],
outputs: [
{
key: 'answerText',
label: '模型回复',
description: '直接响应,无需配置',
type: 'hidden',
targets: []
}
],
position: {
x: 830.725790038998,
y: 201.0790739617387
},
moduleId: 'qbf8td'
},
{
logo: '/imgs/module/db.png',
name: '知识库搜索',
intro: '去知识库中搜索对应的答案。可作为 AI 对话引用参考。',
flowType: 'kbSearchNode',
type: 'http',
url: '/app/modules/kb/search',
inputs: [
{
key: 'kb_ids',
type: 'custom',
label: '关联的知识库',
value: kbs,
list: [],
connected: false
},
{
key: 'similarity',
type: 'slider',
label: '相似度',
value: searchSimilarity,
min: 0,
max: 1,
step: 0.01,
markList: [
{
label: '0',
value: 0
},
{
label: '1',
value: 1
}
],
connected: false
},
{
key: 'limit',
type: 'slider',
label: '单次搜索上限',
value: searchLimit,
min: 1,
max: 20,
step: 1,
markList: [
{
label: '1',
value: 1
},
{
label: '20',
value: 20
}
],
connected: false
},
{
key: 'switch',
type: 'target',
label: '触发器',
connected: true
},
{
key: 'userChatInput',
type: 'target',
label: '用户问题',
connected: true
}
],
outputs: [
{
key: rawSearchKey,
label: '源搜索数据',
type: 'hidden',
response: true,
targets: []
},
{
key: 'isEmpty',
label: '搜索结果为空',
type: 'source',
targets: [
...(searchEmptyText
? [
{
moduleId: 'w8av9y',
key: 'switch'
}
]
: [])
]
},
{
key: 'quotePrompt',
label: '引用内容',
description: '搜索结果为空时不返回',
type: 'source',
targets: [
{
moduleId: 'qbf8td',
key: 'quotePrompt'
}
]
}
],
position: {
x: 101.2612930583856,
y: -31.342317423453437
},
moduleId: 'q9v14m'
},
...(searchEmptyText
? [
{
logo: '/imgs/module/reply.png',
name: '指定回复',
intro: '该模块可以直接回复一段指定的内容。常用于引导、提示。',
type: 'answer',
flowType: 'answerNode',
inputs: [
{
key: 'switch',
type: 'target',
label: '触发器',
connected: true
},
{
key: 'answerText',
value: searchEmptyText,
type: 'input',
label: '回复的内容',
connected: false
}
],
outputs: [],
position: {
x: 827.8570503787319,
y: -63.837994077710675
},
moduleId: 'w8av9y'
}
]
: [])
];
};
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await authUser({ req, authRoot: true });
const { amount, userId, type } = req.body as {
amount: number;
userId: number;
type: 'withdraw';
};
await connectToDatabase();
// 遍历所有的 app
const apps = await App.find(
{
chat: { $ne: null },
modules: { $ne: null }
},
'_id chat'
).limit(2);
const result = await Promise.all(
apps.map(async (app) => {
const modules = (() => {
if (app.chat.relatedKbs.length === 0) {
return chatTemplate({
model: app.chat.chatModel,
temperature: app.chat.temperature,
maxToken: app.chat.maxToken,
systemPrompt: app.chat.systemPrompt,
limitPrompt: app.chat.limitPrompt
});
} else {
return kbTemplate({
model: app.chat.chatModel,
temperature: app.chat.temperature,
maxToken: app.chat.maxToken,
systemPrompt: app.chat.systemPrompt,
limitPrompt: app.chat.limitPrompt,
kbs: app.chat.relatedKbs,
searchEmptyText: app.chat.searchEmptyText,
searchLimit: app.chat.searchLimit,
searchSimilarity: app.chat.searchSimilarity
});
}
})();
return modules;
})
);
console.log(apps);
jsonRes(res, {
data: ''
data: {
apps,
result
}
});
} catch (error) {
jsonRes(res, {
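As written, this handler only computes and returns the converted module graphs for inspection (note the .limit(2) on the query); writing them back is presumably a later step. A hypothetical persistence sketch, assuming the computed modules are meant to replace the legacy chat config:

// Hypothetical follow-up inside the apps.map callback above:
// persist the generated module graph for each app (field name taken from AppSchema).
await App.findByIdAndUpdate(app._id, { modules });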

View File

@@ -6,15 +6,16 @@ import { ChatContextFilter } from '@/service/utils/chat/index';
import type { ChatItemType } from '@/types/chat';
import { ChatRoleEnum } from '@/constants/chat';
import { getOpenAIApi, axiosConfig } from '@/service/ai/openai';
import type { RecognizeIntentionAgentItemType } from '@/types/app';
import type { ClassifyQuestionAgentItemType } from '@/types/app';
import { countModelPrice, pushTaskBillListItem } from '@/service/events/pushBill';
import { getModel } from '@/service/utils/data';
import { authUser } from '@/service/utils/auth';
export type Props = {
systemPrompt?: string;
history?: ChatItemType[];
userChatInput: string;
agents: RecognizeIntentionAgentItemType[];
agents: ClassifyQuestionAgentItemType[];
billId?: string;
};
export type Response = { history: ChatItemType[] };
@@ -24,6 +25,7 @@ const agentFunName = 'agent_user_question';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await authUser({ req, authRoot: true });
let { userChatInput } = req.body as Props;
if (!userChatInput) {
@@ -114,7 +116,7 @@ export async function classifyQuestion({
await pushTaskBillListItem({
billId,
moduleName: 'Recognize Intention',
moduleName: 'Classify Question',
amount: countModelPrice({ model: agentModel, tokens: totalTokens }),
model: getModel(agentModel)?.name,
tokenLen: totalTokens

View File

@@ -6,12 +6,13 @@ import { ChatContextFilter } from '@/service/utils/chat/index';
import type { ChatItemType } from '@/types/chat';
import { ChatRoleEnum } from '@/constants/chat';
import { getOpenAIApi, axiosConfig } from '@/service/ai/openai';
import type { RecognizeIntentionAgentItemType } from '@/types/app';
import type { ClassifyQuestionAgentItemType } from '@/types/app';
import { authUser } from '@/service/utils/auth';
export type Props = {
history?: ChatItemType[];
userChatInput: string;
agents: RecognizeIntentionAgentItemType[];
agents: ClassifyQuestionAgentItemType[];
description: string;
};
export type Response = { history: ChatItemType[] };
@@ -21,6 +22,8 @@ const agentFunName = 'agent_extract_data';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await authUser({ req, authRoot: true });
const response = await extract(req.body);
jsonRes(res, {

View File

@@ -13,6 +13,7 @@ import { getOpenAIApi, axiosConfig } from '@/service/ai/openai';
import { SpecificInputEnum } from '@/constants/app';
import { getChatModel } from '@/service/utils/data';
import { countModelPrice, pushTaskBillListItem } from '@/service/events/pushBill';
import { authUser } from '@/service/utils/auth';
export type Props = {
model: `${OpenAiChatEnum}`;
@@ -31,6 +32,8 @@ export type Response = { [SpecificInputEnum.answerText]: string; totalTokens: nu
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
let { model, temperature = 0, stream } = req.body as Props;
try {
await authUser({ req, authRoot: true });
const response = await chatCompletion({
...req.body,
res,

View File

@@ -5,9 +5,10 @@ import { withNextCors } from '@/service/utils/tools';
import type { ChatItemType } from '@/types/chat';
import { ChatRoleEnum, rawSearchKey } from '@/constants/chat';
import { modelToolMap } from '@/utils/plugin';
import { getVector } from '../../plugin/vector';
import { getVector } from '@/pages/api/openapi/plugin/vector';
import { countModelPrice, pushTaskBillListItem } from '@/service/events/pushBill';
import { getModel } from '@/service/utils/data';
import { authUser } from '@/service/utils/auth';
export type QuoteItemType = {
kb_id: string;
@@ -34,6 +35,8 @@ type Response = {
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await authUser({ req, authRoot: true });
const { kb_ids = [], userChatInput } = req.body as Props;
if (!userChatInput || !Array.isArray(kb_ids)) {

View File

@@ -6,7 +6,7 @@ import { FlowModuleItemType } from '@/types/flow';
import Divider from './modules/Divider';
import Container from './modules/Container';
import RenderInput from './render/RenderInput';
import type { RecognizeIntentionAgentItemType } from '@/types/app';
import type { ClassifyQuestionAgentItemType } from '@/types/app';
import { Handle, Position } from 'reactflow';
import { customAlphabet } from 'nanoid';
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 4);
@@ -30,7 +30,7 @@ const NodeRINode = ({
value: agents = []
}: {
key: string;
value?: RecognizeIntentionAgentItemType[];
value?: ClassifyQuestionAgentItemType[];
}) => (
<Box>
{agents.map((item, i) => (

View File

@@ -70,7 +70,7 @@ const nodeTypes = {
[FlowModuleTypeEnum.kbSearchNode]: NodeKbSearch,
[FlowModuleTypeEnum.tfSwitchNode]: NodeTFSwitch,
[FlowModuleTypeEnum.answerNode]: NodeAnswer,
[FlowModuleTypeEnum.recognizeIntention]: NodeRINode
[FlowModuleTypeEnum.classifyQuestion]: NodeRINode
};
const edgeTypes = {
buttonedge: ButtonEdge

View File

@@ -18,8 +18,10 @@ export const moduleFetch = ({ url, data, res }: Props) =>
const requestUrl = url.startsWith('/') ? `${baseUrl}${url}` : url;
const response = await fetch(requestUrl, {
method: 'POST',
// @ts-ignore
headers: {
'Content-Type': 'application/json'
'Content-Type': 'application/json',
rootkey: process.env.ROOT_KEY
},
body: JSON.stringify(data),
signal: abortSignal.signal
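The rootkey header above is the client half of the internal auth used by the module endpoints, which now call authUser({ req, authRoot: true }) on the server side. A minimal sketch of what that root check is assumed to do (authRootKey is a hypothetical name; the real logic lives inside authUser in @/service/utils/auth and may differ):

// Sketch only: reject module requests whose rootkey header does not match ROOT_KEY.
import type { NextApiRequest } from 'next';

export const authRootKey = async (req: NextApiRequest) => {
  const rootkey = req.headers.rootkey;
  if (!process.env.ROOT_KEY || rootkey !== process.env.ROOT_KEY) {
    return Promise.reject('unAuthorization');
  }
};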

View File

@@ -50,6 +50,48 @@ const AppSchema = new Schema({
modules: {
type: Array,
default: []
},
chat: {
relatedKbs: {
type: [Schema.Types.ObjectId],
ref: 'kb',
default: []
},
searchSimilarity: {
type: Number,
default: 0.8
},
searchLimit: {
type: Number,
default: 5
},
searchEmptyText: {
type: String,
default: ''
},
systemPrompt: {
type: String,
default: ''
},
limitPrompt: {
type: String,
default: ''
},
maxToken: {
type: Number,
default: 4000,
min: 100
},
temperature: {
type: Number,
min: 0,
max: 10,
default: 0
},
chatModel: {
// 聊天时使用的模型
type: String
}
}
});

View File

@@ -3,7 +3,6 @@ import jwt from 'jsonwebtoken';
import Cookie from 'cookie';
import { App, OpenApi, User, ShareChat, KB } from '../mongo';
import type { AppSchema } from '@/types/mongoSchema';
import { defaultApp } from '@/constants/model';
import { formatPrice } from '@/utils/user';
import { ERROR_ENUM } from '../errorCode';

View File

@@ -42,7 +42,7 @@ export type ShareChatEditType = {
/* agent */
/* question classify */
export type RecognizeIntentionAgentItemType = {
export type ClassifyQuestionAgentItemType = {
value: string;
key: string;
};
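The renamed ClassifyQuestionAgentItemType keeps the previous shape, so callers such as the classify endpoint's agents prop only pick up the new name. A hypothetical example (keys and labels are made up for illustration):

// Hypothetical values; in practice the flow editor generates a key per branch.
const agents: ClassifyQuestionAgentItemType[] = [
  { key: 'a1b2', value: 'Questions about using the product' },
  { key: 'c3d4', value: 'Anything else' }
];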

View File

@@ -44,6 +44,17 @@ export interface AppSchema {
collection: number;
};
modules: AppModuleItemType[];
chat: {
relatedKbs: string[];
searchSimilarity: number;
searchLimit: number;
searchEmptyText: string;
systemPrompt: string;
limitPrompt: string;
temperature: number;
maxToken: number;
chatModel: ChatModelType; // 聊天时用的模型,训练后就是训练的模型
};
}
export interface CollectionSchema {