feat: app module

archer
2023-06-27 20:41:36 +08:00
parent 7e6272ca1b
commit 4c54e1821b
17 changed files with 2059 additions and 121 deletions

View File

@@ -47,7 +47,7 @@ export async function saveChat({
modelId,
prompts,
userId
}: Props & { newChatId?: Types.ObjectId; userId: string }) {
}: Props & { newChatId?: Types.ObjectId; userId: string }): Promise<{ newChatId: string }> {
await connectToDatabase();
const { model } = await authModel({ modelId, userId, authOwner: false });
@@ -104,6 +104,7 @@ export async function saveChat({
]);
return {
...response,
// @ts-ignore
newChatId: response?.newChatId || ''
};
}

View File

@@ -0,0 +1,114 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { adaptChatItem_openAI } from '@/utils/plugin/openai';
import { ChatContextFilter } from '@/service/utils/chat/index';
import type { ChatItemType } from '@/types/chat';
import { ChatRoleEnum } from '@/constants/chat';
import { getOpenAIApi, axiosConfig } from '@/service/ai/openai';
import type { ClassifyQuestionAgentItemType } from '@/types/app';
export type Props = {
systemPrompt?: string;
history?: ChatItemType[];
userChatInput: string;
agents: ClassifyQuestionAgentItemType[];
};
export type Response = { history: ChatItemType[] };
const agentModel = 'gpt-3.5-turbo-16k';
const agentFunName = 'agent_user_question';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
let { systemPrompt, agents, history = [], userChatInput } = req.body as Props;
const response = await classifyQuestion({
systemPrompt,
history,
userChatInput,
agents
});
jsonRes(res, {
data: response
});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
}
/* request openai chat */
export async function classifyQuestion({
agents,
systemPrompt,
history = [],
userChatInput
}: Props) {
const messages: ChatItemType[] = [
...(systemPrompt
? [
{
obj: ChatRoleEnum.System,
value: systemPrompt
}
]
: []),
{
obj: ChatRoleEnum.Human,
value: userChatInput
}
];
const filterMessages = ChatContextFilter({
// @ts-ignore
model: agentModel,
prompts: messages,
maxTokens: 1500
});
const adaptMessages = adaptChatItem_openAI({ messages: filterMessages, reserveId: false });
// function schema passed to the OpenAI function-call API
const agentFunction = {
name: agentFunName,
description: '严格判断用户问题的类型', // "strictly classify the type of the user question"
parameters: {
type: 'object',
properties: {
type: {
type: 'string',
description: agents.map((item) => `${item.desc},返回: '${item.key}'`).join('; '), // per agent: "<desc>, return: '<key>'"
enum: agents.map((item) => item.key)
}
},
required: ['type']
}
};
const chatAPI = getOpenAIApi();
const response = await chatAPI.createChatCompletion(
{
model: agentModel,
temperature: 0,
messages: [...adaptMessages],
function_call: { name: agentFunName },
functions: [agentFunction]
},
{
...axiosConfig()
}
);
const arg = JSON.parse(response.data.choices?.[0]?.message?.function_call?.arguments || '{}');
if (!arg.type) {
throw new Error('Classify error: the function call returned no "type" argument');
}
console.log(adaptMessages, arg.type);
return {
[arg.type]: 1
};
}
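
For reference, a minimal sketch of calling classifyQuestion directly; it is not part of this commit, and the import path and the agent keys/descriptions are assumptions for illustration only.

// Hypothetical usage (assumed path and agents)
import { classifyQuestion } from './classifyQuestion';

async function demoClassify() {
  const classification = await classifyQuestion({
    systemPrompt: 'You are a customer support assistant.',
    history: [],
    userChatInput: 'How do I reset my password?',
    agents: [
      { key: 'faq', desc: 'Questions about using the product' },
      { key: 'chat', desc: 'Small talk unrelated to the product' }
    ]
  });
  console.log(classification); // a single agent key set to 1, e.g. { faq: 1 }
}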

View File

@@ -0,0 +1,97 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { adaptChatItem_openAI } from '@/utils/plugin/openai';
import { ChatContextFilter } from '@/service/utils/chat/index';
import type { ChatItemType } from '@/types/chat';
import { ChatRoleEnum } from '@/constants/chat';
import { getOpenAIApi, axiosConfig } from '@/service/ai/openai';
import type { ClassifyQuestionAgentItemType } from '@/types/app';
export type Props = {
history?: ChatItemType[];
userChatInput: string;
agents: ClassifyQuestionAgentItemType[];
description: string;
};
export type Response = { history: ChatItemType[] };
const agentModel = 'gpt-3.5-turbo-16k';
const agentFunName = 'agent_extract_data';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
const response = await extract(req.body);
jsonRes(res, {
data: response
});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
}
/* request openai chat */
export async function extract({ agents, history = [], userChatInput, description }: Props) {
const messages: ChatItemType[] = [
...history.slice(-4),
{
obj: ChatRoleEnum.Human,
value: userChatInput
}
];
const filterMessages = ChatContextFilter({
// @ts-ignore
model: agentModel,
prompts: messages,
maxTokens: 3000
});
const adaptMessages = adaptChatItem_openAI({ messages: filterMessages, reserveId: false });
const properties: Record<
string,
{
type: string;
description: string;
}
> = {};
agents.forEach((item) => {
properties[item.key] = {
type: 'string',
description: item.desc
};
});
// function schema passed to the OpenAI function-call API
const agentFunction = {
name: agentFunName,
description,
parameters: {
type: 'object',
properties,
required: agents.map((item) => item.key)
}
};
const chatAPI = getOpenAIApi();
const response = await chatAPI.createChatCompletion(
{
model: agentModel,
temperature: 0,
messages: [...adaptMessages],
function_call: { name: agentFunName },
functions: [agentFunction]
},
{
...axiosConfig()
}
);
const arg = JSON.parse(response.data.choices?.[0]?.message?.function_call?.arguments || '{}');
return arg;
}
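
Similarly, a hedged sketch of calling extract for structured field extraction; it is not part of this commit, and the import path and field definitions are made up for illustration.

// Hypothetical usage (assumed path and fields)
import { extract } from './extract';

async function demoExtract() {
  const fields = await extract({
    description: 'Extract booking details from the user input',
    history: [],
    userChatInput: 'Book a table for two at 7pm on Friday',
    agents: [
      { key: 'people', desc: 'number of people' },
      { key: 'time', desc: 'booking time' }
    ]
  });
  console.log(fields); // whatever the function call extracted, e.g. { people: '2', time: 'Friday 7pm' }
}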

View File

@@ -0,0 +1,257 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { sseResponse } from '@/service/utils/tools';
import { ChatModelMap, OpenAiChatEnum } from '@/constants/model';
import { adaptChatItem_openAI } from '@/utils/plugin/openai';
import { modelToolMap } from '@/utils/plugin';
import { ChatCompletionType, ChatContextFilter } from '@/service/utils/chat/index';
import type { ChatItemType } from '@/types/chat';
import { getSystemOpenAiKey } from '@/service/utils/auth';
import { ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat';
import { parseStreamChunk, textAdaptGptResponse } from '@/utils/adapt';
import { getOpenAIApi, axiosConfig } from '@/service/ai/openai';
export type Props = {
model: `${OpenAiChatEnum}`;
temperature?: number;
maxToken?: number;
history?: ChatItemType[];
userChatInput: string;
stream?: boolean;
quotePrompt?: string;
systemPrompt?: string;
limitPrompt?: string;
};
export type Response = { history: ChatItemType[] };
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
let {
model,
stream = false,
temperature = 0,
maxToken = 4000,
history = [],
quotePrompt,
userChatInput,
systemPrompt,
limitPrompt
} = req.body as Props;
// temperature adapt
const modelConstantsData = ChatModelMap[model];
// FastGpt temperature range: 1~10
temperature = +(modelConstantsData.maxTemperature * (temperature / 10)).toFixed(2);
const response = await chatCompletion({
res,
model,
temperature,
maxToken,
stream,
history,
userChatInput,
systemPrompt,
limitPrompt,
quotePrompt
});
if (stream) {
sseResponse({
res,
event: sseResponseEventEnum.moduleFetchResponse,
data: JSON.stringify(response)
});
res.end();
} else {
jsonRes(res, {
data: response
});
}
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
}
/* request openai chat */
export async function chatCompletion({
res,
model = OpenAiChatEnum.GPT35,
temperature,
maxToken = 4000,
stream,
history = [],
quotePrompt,
userChatInput,
systemPrompt,
limitPrompt
}: Props & { res: NextApiResponse }) {
const messages: ChatItemType[] = [
...(quotePrompt
? [
{
obj: ChatRoleEnum.System,
value: quotePrompt
}
]
: []),
...(systemPrompt
? [
{
obj: ChatRoleEnum.System,
value: systemPrompt
}
]
: []),
...history,
...(limitPrompt
? [
{
obj: ChatRoleEnum.Human,
value: limitPrompt
}
]
: []),
{
obj: ChatRoleEnum.Human,
value: userChatInput
}
];
const modelTokenLimit = ChatModelMap[model]?.contextMaxToken || 4000;
const filterMessages = ChatContextFilter({
model,
prompts: messages,
maxTokens: Math.ceil(modelTokenLimit - 300) // context-filter budget, separate from the response max_tokens
});
const adaptMessages = adaptChatItem_openAI({ messages: filterMessages, reserveId: false });
const chatAPI = getOpenAIApi();
console.log(adaptMessages);
/* count response max token */
const promptsToken = modelToolMap[model].countTokens({
messages: filterMessages
});
maxToken = maxToken + promptsToken > modelTokenLimit ? modelTokenLimit - promptsToken : maxToken;
const response = await chatAPI.createChatCompletion(
{
model,
temperature: Number(temperature || 0),
max_tokens: maxToken,
messages: adaptMessages,
frequency_penalty: 0.5, // higher values reduce repeated content
presence_penalty: -0.5, // higher values make new topics more likely
stream
},
{
timeout: stream ? 60000 : 480000,
responseType: stream ? 'stream' : 'json',
...axiosConfig()
}
);
const { answer, totalTokens } = await (async () => {
if (stream) {
// sse response
const { answer } = await streamResponse({ res, response });
// count tokens
const finishMessages = filterMessages.concat({
obj: ChatRoleEnum.AI,
value: answer
});
const totalTokens = modelToolMap[model].countTokens({
messages: finishMessages
});
return {
answer,
totalTokens
};
} else {
const answer = response.data.choices?.[0].message?.content || '';
const totalTokens = response.data.usage?.total_tokens || 0;
return {
answer,
totalTokens
};
}
})();
// count price
const unitPrice = ChatModelMap[model]?.price || 3;
return {
answer
};
}
async function streamResponse({ res, response }: { res: NextApiResponse; response: any }) {
let answer = '';
let error: any = null;
const clientRes = async (data: string) => {
const { content = '' } = (() => {
try {
const json = JSON.parse(data);
const content: string = json?.choices?.[0].delta.content || '';
error = json.error;
answer += content;
return { content };
} catch (error) {
return {};
}
})();
if (res.closed || error) return;
if (data === '[DONE]') {
sseResponse({
res,
event: sseResponseEventEnum.answer,
data: textAdaptGptResponse({
text: null,
finish_reason: 'stop'
})
});
sseResponse({
res,
event: sseResponseEventEnum.answer,
data: '[DONE]'
});
} else {
sseResponse({
res,
event: sseResponseEventEnum.answer,
data: textAdaptGptResponse({
text: content
})
});
}
};
try {
for await (const chunk of response.data as any) {
if (res.closed) break;
const parse = parseStreamChunk(chunk);
parse.forEach((item) => clientRes(item.data));
}
} catch (error) {
console.log('pipe error', error);
}
if (error) {
console.log(error);
return Promise.reject(error);
}
return {
answer
};
}
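
As a rough illustration of what this handler expects, here is a non-streaming request sketch; it is not part of this commit, the endpoint URL and the model string are guesses, and only the body shape follows the Props type above (the jsonRes wrapper is assumed to place the payload under data).

// Hypothetical client call — URL, model value, and response unwrapping are assumptions
async function demoChatModule() {
  const body = {
    model: 'gpt-3.5-turbo', // assumed OpenAiChatEnum value; must be a key of ChatModelMap
    temperature: 5, // FastGpt range 1~10, rescaled inside the handler
    maxToken: 800,
    stream: false,
    systemPrompt: 'You are a helpful assistant.',
    userChatInput: 'Summarize the release notes in one sentence.'
  };
  const reply = await fetch('/api/openapi/module/chat/gpt', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body)
  }).then((r) => r.json());
  console.log(reply?.data?.answer); // the model answer when stream is false
}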

View File

@@ -0,0 +1,115 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { PgClient } from '@/service/pg';
import { withNextCors } from '@/service/utils/tools';
import type { ChatItemType } from '@/types/chat';
import { ChatRoleEnum } from '@/constants/chat';
import { openaiEmbedding_system } from '../../plugin/openaiEmbedding';
import { modelToolMap } from '@/utils/plugin';
export type QuoteItemType = {
id: string;
q: string;
a: string;
source?: string;
};
type Props = {
kb_ids: string[];
history: ChatItemType[];
similarity: number;
limit: number;
maxToken: number;
userChatInput: string;
stream?: boolean;
};
type Response = {
rawSearch: QuoteItemType[];
isEmpty?: boolean;
quotePrompt: string;
};
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
const {
kb_ids = [],
history = [],
similarity,
limit,
maxToken,
userChatInput
} = req.body as Props;
if (!similarity || !Array.isArray(kb_ids)) {
throw new Error('Invalid params');
}
const result = await appKbSearch({
kb_ids,
history,
similarity,
limit,
maxToken,
userChatInput
});
jsonRes<Response>(res, {
data: result
});
} catch (err) {
console.log(err);
jsonRes(res, {
code: 500,
error: err
});
}
});
export async function appKbSearch({
kb_ids = [],
history = [],
similarity = 0.8,
limit = 5,
maxToken = 2500,
userChatInput
}: Props): Promise<Response> {
// get vector
const promptVector = await openaiEmbedding_system({
input: [userChatInput]
});
// search kb
const res: any = await PgClient.query(
`BEGIN;
SET LOCAL ivfflat.probes = ${global.systemEnv.pgIvfflatProbe || 10};
select id,q,a,source from modelData where kb_id IN (${kb_ids
.map((item) => `'${item}'`)
.join(',')}) AND vector <#> '[${promptVector[0]}]' < -${similarity} order by vector <#> '[${
promptVector[0]
}]' limit ${limit};
COMMIT;`
);
const searchRes: QuoteItemType[] = res?.[2]?.rows || [];
// filter part of the quotes by maxToken
const filterSearch = modelToolMap['gpt-3.5-turbo'].tokenSlice({
maxToken,
messages: searchRes.map((item, i) => ({
obj: ChatRoleEnum.System,
value: `${i + 1}: [${item.q}\n${item.a}]`
}))
});
const sliceResult = filterSearch
.map((item) => item.value)
.join('\n')
.trim();
// keep only the quotes that survived the token slice
const rawSearch = searchRes.slice(0, filterSearch.length);
return {
isEmpty: rawSearch.length === 0,
rawSearch,
quotePrompt: sliceResult ? `知识库:\n${sliceResult}` : '' // "知识库" = "Knowledge base"
};
}
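
A minimal sketch of calling appKbSearch directly; it is not part of this commit, with a placeholder kb id and an assumed import path.

// Hypothetical usage (assumed path, placeholder kb id)
import { appKbSearch } from './kbSearch';

async function demoKbSearch() {
  const { isEmpty, rawSearch, quotePrompt } = await appKbSearch({
    kb_ids: ['my-kb-id'],
    history: [],
    similarity: 0.8,
    limit: 5,
    maxToken: 2500,
    userChatInput: 'What is the refund policy?'
  });
  // quotePrompt ("知识库: ..." / "Knowledge base: ...") can be fed to the chat module's
  // quotePrompt input; rawSearch keeps the raw hits for citation display.
  console.log(isEmpty, rawSearch.length);
}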

View File

@@ -0,0 +1,4 @@
export type Props = {
url: string;
body: Record<string, any>;
};

View File

@@ -81,3 +81,35 @@ export async function openaiEmbedding({
return result.vectors;
}
export async function openaiEmbedding_system({ input }: Props) {
const apiKey = getSystemOpenAiKey();
// get the chat API instance
const chatAPI = getOpenAIApi(apiKey);
// convert the input text into embedding vectors
const result = await chatAPI
.createEmbedding(
{
model: embeddingModel,
input
},
{
timeout: 60000,
...axiosConfig(apiKey)
}
)
.then((res) => {
if (!res.data?.usage?.total_tokens) {
// @ts-ignore
return Promise.reject(res.data?.error?.message || 'Embedding Error');
}
return {
tokenLen: res.data.usage.total_tokens || 0,
vectors: res.data.data.map((item) => item.embedding)
};
});
return result.vectors;
}
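
A tiny sketch of using openaiEmbedding_system on its own (not part of this commit; assumes an async context):

// Hypothetical usage — assumes this runs inside an async function
const [questionVector] = await openaiEmbedding_system({
  input: ['What is the refund policy?']
});
// questionVector is a number[] embedding created with the system OpenAI key;
// appKbSearch interpolates it into the pgvector similarity query shown earlier.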

View File

@@ -0,0 +1,338 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase } from '@/service/mongo';
import { authUser, authModel, getApiKey, authShareChat } from '@/service/utils/auth';
import { sseErrRes, jsonRes } from '@/service/response';
import { ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat';
import { withNextCors } from '@/service/utils/tools';
import type { CreateChatCompletionRequest } from 'openai';
import { gptMessage2ChatType, textAdaptGptResponse } from '@/utils/adapt';
import { getChatHistory } from './getHistory';
import { saveChat } from '@/pages/api/chat/saveChat';
import { sseResponse } from '@/service/utils/tools';
import { type ChatCompletionRequestMessage } from 'openai';
import {
kbChatAppDemo,
chatAppDemo,
lafClassifyQuestionDemo,
classifyQuestionDemo,
SpecificInputEnum,
AppModuleItemTypeEnum
} from '@/constants/app';
import { Types } from 'mongoose';
import { moduleFetch } from '@/service/api/request';
import { AppModuleItemType } from '@/types/app';
export type MessageItemType = ChatCompletionRequestMessage & { _id?: string };
type FastGptWebChatProps = {
chatId?: string; // undefined: nonuse history, '': new chat, 'xxxxx': use history
appId?: string;
};
type FastGptShareChatProps = {
password?: string;
shareId?: string;
};
export type Props = CreateChatCompletionRequest &
FastGptWebChatProps &
FastGptShareChatProps & {
messages: MessageItemType[];
stream?: boolean;
};
export type ChatResponseType = {
newChatId: string;
quoteLen?: number;
};
/* send the prompt */
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse) {
res.on('close', () => {
res.end();
});
res.on('error', () => {
console.log('error: ', 'request error');
res.end();
});
let { chatId, appId, shareId, password = '', stream = false, messages = [] } = req.body as Props;
try {
if (!messages) {
throw new Error('Params Error');
}
if (!Array.isArray(messages)) {
throw new Error('messages is not array');
}
await connectToDatabase();
let startTime = Date.now();
/* user auth */
const {
userId,
appId: authAppid,
authType
} = await (shareId
? authShareChat({
shareId,
password
})
: authUser({ req }));
appId = appId ? appId : authAppid;
if (!appId) {
throw new Error('appId is empty');
}
// get history
const { history } = await getChatHistory({ chatId, userId });
const prompts = history.concat(gptMessage2ChatType(messages));
if (prompts[prompts.length - 1].obj === 'AI') {
prompts.pop();
}
// user question
const prompt = prompts.pop();
if (!prompt) {
throw new Error('Question is empty');
}
/* start process */
const modules = JSON.parse(JSON.stringify(classifyQuestionDemo.modules));
const { responseData, answerText } = await dispatchModules({
res,
modules,
params: {
history: prompts,
userChatInput: prompt.value
},
stream
});
// save chat
if (typeof chatId === 'string') {
const { newChatId } = await saveChat({
chatId,
modelId: appId,
prompts: [
prompt,
{
_id: messages[messages.length - 1]._id,
obj: ChatRoleEnum.AI,
value: answerText,
responseData
}
],
userId
});
if (newChatId) {
sseResponse({
res,
event: sseResponseEventEnum.chatResponse,
data: JSON.stringify({
newChatId
})
});
}
}
if (stream) {
sseResponse({
res,
event: sseResponseEventEnum.appStreamResponse,
data: JSON.stringify(responseData)
});
res.end();
} else {
res.json({
data: responseData,
id: chatId || '',
model: '',
usage: { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 },
choices: [
{
message: [{ role: 'assistant', content: answerText }],
finish_reason: 'stop',
index: 0
}
]
});
}
} catch (err: any) {
if (stream) {
res.status(500);
sseErrRes(res, err);
res.end();
} else {
jsonRes(res, {
code: 500,
error: err
});
}
}
});
async function dispatchModules({
res,
modules,
params = {},
stream = false
}: {
res: NextApiResponse;
modules: AppModuleItemType[];
params?: Record<string, any>;
stream?: boolean;
}) {
let storeData: Record<string, any> = {};
let responseData: Record<string, any> = {};
let answerText = '';
function pushStore({
isResponse = false,
answer,
data = {}
}: {
isResponse?: boolean;
answer?: string;
data?: Record<string, any>;
}) {
if (isResponse) {
responseData = {
...responseData,
...data
};
}
if (answer) {
answerText += answer;
}
storeData = {
...storeData,
...data
};
}
function moduleInput(module: AppModuleItemType, data: Record<string, any> = {}): Promise<any> {
const checkInputFinish = () => {
return !module.inputs.find((item: any) => item.value === undefined);
};
const updateInputValue = (key: string, value: any) => {
const index = module.inputs.findIndex((item: any) => item.key === key);
if (index === -1) return;
module.inputs[index].value = value;
};
return Promise.all(
Object.entries(data).map(([key, val]: any) => {
updateInputValue(key, val);
if (checkInputFinish()) {
return moduleRun(module);
}
})
);
}
function moduleOutput(module: AppModuleItemType, result: Record<string, any> = {}): Promise<any> {
return Promise.all(
module.outputs.map((item) => {
if (result[item.key] === undefined) return;
/* update output value */
item.value = result[item.key];
pushStore({
isResponse: item.response,
answer: item.answer ? item.value : '',
data: {
[item.key]: item.value
}
});
/* update target */
return Promise.all(
item.targets.map((target: any) => {
// find module
const targetModule = modules.find((item) => item.moduleId === target.moduleId);
if (!targetModule) return;
return moduleInput(targetModule, { [target.key]: item.value });
})
);
})
);
}
async function moduleRun(module: AppModuleItemType): Promise<any> {
console.log('run=========', module.type, module.url);
if (module.type === AppModuleItemTypeEnum.answer) {
pushStore({
answer: module.inputs[0].value
});
return AnswerResponse({
res,
stream,
text: module.inputs.find((item) => item.key === SpecificInputEnum.answerText)?.value
});
}
if (module.type === AppModuleItemTypeEnum.switch) {
return moduleOutput(module, switchResponse(module));
}
if (module.type === AppModuleItemTypeEnum.http && module.url) {
// get fetch params
const inputParams: Record<string, any> = {};
module.inputs.forEach((item: any) => {
inputParams[item.key] = item.value;
});
const data = {
stream,
...module.body,
...inputParams
};
// response data
const fetchRes = await moduleFetch({
res,
url: module.url,
data
});
return moduleOutput(module, fetchRes);
}
}
// start the recursion by filling in the initial params
await Promise.all(modules.map((module) => moduleInput(module, params)));
return {
responseData,
answerText
};
}
function AnswerResponse({
res,
stream = false,
text = ''
}: {
res: NextApiResponse;
stream?: boolean;
text?: string;
}) {
if (stream) {
return sseResponse({
res,
event: sseResponseEventEnum.answer,
data: textAdaptGptResponse({
text
})
});
}
return text;
}
function switchResponse(module: any) {
const val = module?.inputs?.[0]?.value;
if (val) {
return { true: 1 };
}
return { false: 1 };
}
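
To make the dispatch flow above more concrete, here is a hypothetical two-module graph; it is not part of this commit, every id and URL is made up, and the object shape is inferred only from the fields that dispatchModules, moduleInput, and moduleOutput actually read (the real graphs such as classifyQuestionDemo live in @/constants/app and are not shown in this diff).

// Hypothetical module graph — illustrative only
const demoModules = [
  {
    moduleId: 'chat',
    type: AppModuleItemTypeEnum.http,
    url: '/openapi/module/chat/gpt', // assumed module endpoint
    body: { model: 'gpt-3.5-turbo' },
    inputs: [
      { key: 'history', value: undefined },
      { key: 'userChatInput', value: undefined }
    ],
    outputs: [
      {
        key: 'answer', // matches the { answer } field the chat module above returns
        answer: false,
        response: false,
        targets: [{ moduleId: 'answer', key: SpecificInputEnum.answerText }]
      }
    ]
  },
  {
    moduleId: 'answer',
    type: AppModuleItemTypeEnum.answer,
    inputs: [{ key: SpecificInputEnum.answerText, value: undefined }],
    outputs: []
  }
];
// dispatchModules({ res, modules: demoModules, params: { history, userChatInput }, stream })
// would fill the chat module's inputs, call its url via moduleFetch, then forward the
// answer text to the answer module, which sends it back through AnswerResponse.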