monorepo packages (#344)

This commit is contained in:
Archer
2023-09-24 18:02:09 +08:00
committed by GitHub
parent a4ff5a3f73
commit 3d7178d06f
535 changed files with 12048 additions and 227 deletions

View File

@@ -0,0 +1,172 @@
import { Bill, User, OutLink } from '@/service/mongo';
import { BillSourceEnum } from '@/constants/user';
import { getModel } from '@/service/utils/data';
import { ChatHistoryItemResType } from '@/types/chat';
import { formatPrice } from '@fastgpt/common/bill/index';
import { addLog } from '@/service/utils/tools';
import type { CreateBillType } from '@/types/common/bill';
/* Create a bill record and deduct its amount from the user's balance. */
async function createBill(data: CreateBillType) {
  try {
    // Run the balance deduction and the bill insertion concurrently.
    const deductBalance = User.findByIdAndUpdate(data.userId, {
      $inc: { balance: -data.total }
    });
    const insertBill = Bill.create(data);
    await Promise.all([deductBalance, insertBill]);
  } catch (error) {
    // Billing must never break the calling flow; just record the failure.
    addLog.error(`createBill error`, error);
  }
}
async function concatBill({
billId,
total,
listIndex,
tokens = 0,
userId
}: {
billId?: string;
total: number;
listIndex?: number;
tokens?: number;
userId: string;
}) {
if (!billId) return;
try {
await Promise.all([
Bill.findOneAndUpdate(
{
_id: billId,
userId
},
{
$inc: {
total,
...(listIndex !== undefined && {
[`list.${listIndex}.amount`]: total,
[`list.${listIndex}.tokenLen`]: tokens
})
}
}
),
User.findByIdAndUpdate(userId, {
$inc: { balance: -total }
})
]);
} catch (error) {}
}
/**
 * Record the bill for one finished chat completion: sum the per-module prices,
 * persist the bill (with a per-module breakdown) and log the total.
 * Returns the charged total.
 */
export const pushChatBill = ({
  appName,
  appId,
  userId,
  source,
  response
}: {
  appName: string;
  appId: string;
  userId: string;
  source: `${BillSourceEnum}`;
  response: ChatHistoryItemResType[];
}) => {
  // Accumulate the total while building the per-module breakdown in one pass.
  let total = 0;
  const list = response.map((item) => {
    total += item.price;
    return {
      moduleName: item.moduleName,
      amount: item.price || 0,
      model: item.model,
      tokenLen: item.tokens
    };
  });

  // Fire-and-forget: billing failures are handled inside createBill.
  createBill({
    userId,
    appName,
    appId,
    total,
    source,
    list
  });

  addLog.info(`finish completions`, {
    source,
    userId,
    price: formatPrice(total)
  });

  return { total };
};
/**
 * Bill a finished QA-split task: price = QA model unit price × total tokens.
 * The charge is appended onto the existing bill (list slot 1).
 */
export const pushQABill = async ({
  userId,
  totalTokens,
  billId
}: {
  userId: string;
  totalTokens: number;
  billId: string;
}) => {
  addLog.info('splitData generate success', { totalTokens });

  // Unit price of the QA model (splitting always uses gpt-3.5); default 3.
  const pricePerToken = global.qaModel.price || 3;
  const total = pricePerToken * totalTokens;

  // Append onto the existing bill; slot 1 is the QA step.
  concatBill({
    billId,
    userId,
    total,
    tokens: totalTokens,
    listIndex: 1
  });

  return { total };
};
/**
 * Bill a vector (embedding) generation. The charge is floored at 1.
 * With a billId the amount is appended onto that bill (slot 0); otherwise a
 * standalone "索引生成" bill is created.
 */
export const pushGenerateVectorBill = async ({
  billId,
  userId,
  tokenLen,
  model
}: {
  billId?: string;
  userId: string;
  tokenLen: number;
  model: string;
}) => {
  // Resolve the vector model; unknown names fall back to the first configured one.
  const vectorModel =
    global.vectorModels.find((item) => item.model === model) || global.vectorModels[0];
  const unitPrice = vectorModel.price || 0.2;

  // Minimum charge is 1.
  const total = Math.max(unitPrice * tokenLen, 1);

  if (billId) {
    // Part of an existing bill: append into list slot 0 (the vector step).
    concatBill({
      userId,
      total,
      billId,
      tokens: tokenLen,
      listIndex: 0
    });
  } else {
    // Standalone bill for index generation.
    createBill({
      userId,
      appName: '索引生成',
      total,
      source: BillSourceEnum.fastgpt,
      list: [
        {
          moduleName: '索引生成',
          amount: total,
          model: vectorModel.model,
          tokenLen
        }
      ]
    });
  }

  return { total };
};
/* Price of a request: model unit price × token count (0 for unknown models). */
export const countModelPrice = ({ model, tokens }: { model: string; tokens: number }) => {
  const modelData = getModel(model);
  return modelData ? modelData.price * tokens : 0;
};

View File

@@ -0,0 +1,46 @@
import { Schema, model, models, Model } from 'mongoose';
import { BillSchema as BillType } from '@/types/common/bill';
import { BillSourceMap } from '@/constants/user';
// Bill record: one row per billable action (chat completion, QA split, vector
// generation). `list` holds the per-module cost breakdown of a single bill.
const BillSchema = new Schema({
  userId: {
    type: Schema.Types.ObjectId,
    ref: 'user',
    required: true
  },
  // Name of the app that produced the charge (empty for non-app bills).
  appName: {
    type: String,
    default: ''
  },
  appId: {
    type: Schema.Types.ObjectId,
    ref: 'model',
    required: false
  },
  time: {
    type: Date,
    default: () => new Date()
  },
  // Total charged amount (in the system's scaled price unit).
  total: {
    type: Number,
    required: true
  },
  // Origin of the charge; restricted to the known bill sources.
  source: {
    type: String,
    enum: Object.keys(BillSourceMap),
    required: true
  },
  // Per-module cost items; shape is defined by the bill push code.
  list: {
    type: Array,
    default: []
  }
});

try {
  BillSchema.index({ userId: 1 });
  // TTL index: bills expire 90 days after `time`.
  BillSchema.index({ time: 1 }, { expireAfterSeconds: 90 * 24 * 60 * 60 });
} catch (error) {
  console.log(error);
}

export const Bill: Model<BillType> = models['bill'] || model('bill', BillSchema);

View File

@@ -0,0 +1,24 @@
import { Schema, model, models, Model } from 'mongoose';
import type { IpLimitSchemaType } from '@/types/common/ipLimit';
// Per-IP rate-limit bucket. Presumably `account` counts hits for `eventId`
// within the minute starting at `lastMinute` — confirm against the caller.
const IpLimitSchema = new Schema({
  // Identifier of the rate-limited event/endpoint.
  eventId: {
    type: String,
    required: true
  },
  ip: {
    type: String,
    required: true
  },
  // Hit counter for the current window.
  account: {
    type: Number,
    default: 0
  },
  // Start of the current one-minute window.
  lastMinute: {
    type: Date,
    default: () => new Date()
  }
});

export const IpLimit: Model<IpLimitSchemaType> =
  models['ip_limit'] || model('ip_limit', IpLimitSchema);

View File

@@ -0,0 +1,39 @@
import type { NextApiResponse } from 'next';
/**
 * Build a back-pressure-aware writer for streaming a readable into `res`:
 * the source stream is paused while the response buffer is full and resumed
 * when it drains. Returns the write function.
 */
export function responseWriteController({
  res,
  readStream
}: {
  res: NextApiResponse;
  readStream: any;
}) {
  // Once the response buffer drains, let the source stream flow again.
  res.on('drain', () => readStream.resume());

  return (text: string) => {
    const ok = res.write(text);
    // Back-pressure: stop reading while the client cannot keep up.
    if (!ok) readStream.pause();
  };
}
/**
 * Emit one SSE frame: an optional `event:` line followed by the `data:` line.
 * Either a prepared `write` function or a raw response may be supplied.
 */
export function responseWrite({
  res,
  write,
  event,
  data
}: {
  res?: NextApiResponse;
  write?: (text: string) => void;
  event?: string;
  data: string;
}) {
  // Fixed: bind res.write so the fallback keeps its `this` context — an
  // unbound ServerResponse.write throws when invoked.
  const Write = write || res?.write.bind(res);

  if (!Write) return;

  // SSE format: optional event name, then the data payload + blank line.
  event && Write(`event: ${event}\n`);
  Write(`data: ${data}\n\n`);
}

View File

@@ -0,0 +1,68 @@
import { ChatItemType } from '@/types/chat';
import { ChatRoleEnum } from '@/constants/chat';
import type { NextApiResponse } from 'next';
import { countMessagesTokens, countPromptTokens } from '@/utils/common/tiktoken';
import { adaptRole_Chat2Message } from '@/utils/common/adapt/message';
// Aggregated result of a chat completion request (stream or not).
export type ChatCompletionResponseType = {
  streamResponse: any;
  responseMessages: ChatItemType[];
  responseText: string;
  totalTokens: number;
};
// Arguments for consuming a model stream; extra fields are passed through.
export type StreamResponseType = {
  chatResponse: any;
  messages: ChatItemType[];
  res: NextApiResponse;
  model: string;
  [key: string]: any;
};
/**
 * Slice chat context by tokens: keep all leading system prompts, then keep as
 * many of the most recent conversation messages as fit in `maxTokens`.
 */
export function ChatContextFilter({
  messages = [],
  maxTokens
}: {
  messages: ChatItemType[];
  maxTokens: number;
}) {
  if (!Array.isArray(messages)) {
    return [];
  }

  // If the raw text is well under the budget, skip token counting entirely.
  const rawTextLen = messages.reduce((sum, item) => sum + item.value.length, 0);
  if (rawTextLen < maxTokens * 0.5) {
    return messages;
  }

  // Split off the leading system prompts — they must always be kept.
  // Fixed: findIndex returns -1 when EVERY message is a system prompt; the
  // previous slice(0, -1)/slice(-1) mishandled that and dropped one message.
  const chatStartIndex = messages.findIndex((item) => item.obj !== ChatRoleEnum.System);
  const systemPrompts: ChatItemType[] =
    chatStartIndex === -1 ? messages.slice(0) : messages.slice(0, chatStartIndex);
  const chatPrompts: ChatItemType[] = chatStartIndex === -1 ? [] : messages.slice(chatStartIndex);

  // Reserve budget for the system prompts first.
  maxTokens -= countMessagesTokens({
    messages: systemPrompts
  });

  // Walk the conversation from newest to oldest, keeping as much as fits.
  const chats: ChatItemType[] = [];
  for (let i = chatPrompts.length - 1; i >= 0; i--) {
    const item = chatPrompts[i];
    chats.unshift(item);

    const tokens = countPromptTokens(item.value, adaptRole_Chat2Message(item.obj));
    maxTokens -= tokens;

    // Budget exhausted: drop the message that overflowed and stop.
    if (maxTokens <= 0) {
      chats.shift();
      break;
    }
  }

  return [...systemPrompts, ...chats];
}

View File

@@ -0,0 +1,91 @@
// HTTP status code → human-readable (zh-CN) message.
export const ERROR_CODE: { [key: number]: string } = {
  400: '请求失败',
  401: '无权访问',
  403: '禁止访问', // fixed typo: was “紧张访问”
  404: '请求不存在',
  405: '请求方法错误',
  406: '请求的格式错误',
  410: '资源已删除',
  422: '验证错误',
  500: '服务器发生错误',
  502: '网关错误',
  503: '服务器暂时过载或维护',
  504: '网关超时'
};
// Status codes that mean the auth token is invalid/expired.
export const TOKEN_ERROR_CODE: Record<number, string> = {
  403: '登录状态无效,请重新登录'
};
// Known OpenAI request-level error identifiers → user-facing messages.
export const openaiError: Record<string, string> = {
  context_length_exceeded: '内容超长了,请重置对话',
  Unauthorized: 'API-KEY 不合法',
  rate_limit_reached: 'API被限制请稍后再试',
  'Bad Request': 'Bad Request~ 可能内容太多了',
  'Bad Gateway': '网关异常,请重试'
};
// OpenAI account-level error identifiers → user-facing messages.
export const openaiAccountError: Record<string, string> = {
  insufficient_quota: 'API 余额不足',
  invalid_api_key: 'openai 账号异常',
  account_deactivated: '账号已停用',
  invalid_request_error: '无效请求'
};
// Network error codes treated as proxy/connection failures (retryable).
export const proxyError: Record<string, boolean> = {
  ECONNABORTED: true,
  ECONNRESET: true
};
// Business error identifiers used across the service layer.
export enum ERROR_ENUM {
  unAuthorization = 'unAuthorization',
  insufficientQuota = 'insufficientQuota',
  unAuthModel = 'unAuthModel',
  unAuthApiKey = 'unAuthApiKey',
  unAuthKb = 'unAuthKb',
  unAuthFile = 'unAuthFile'
}

// Canned HTTP responses for each business error. Keys are ERROR_ENUM values.
// Fixed: key type was `Record<any, …>` — `any` as a key type is meaningless;
// `string` keeps string-based lookup working while restoring type checking.
export const ERROR_RESPONSE: Record<
  string,
  {
    code: number;
    statusText: string;
    message: string;
    data?: any;
  }
> = {
  [ERROR_ENUM.unAuthorization]: {
    code: 403,
    statusText: ERROR_ENUM.unAuthorization,
    message: '凭证错误',
    data: null
  },
  [ERROR_ENUM.insufficientQuota]: {
    code: 510,
    statusText: ERROR_ENUM.insufficientQuota,
    message: '账号余额不足',
    data: null
  },
  [ERROR_ENUM.unAuthModel]: {
    code: 511,
    statusText: ERROR_ENUM.unAuthModel,
    message: '无权使用该模型',
    data: null
  },
  [ERROR_ENUM.unAuthKb]: {
    code: 512,
    statusText: ERROR_ENUM.unAuthKb,
    message: '无权使用该知识库',
    data: null
  },
  [ERROR_ENUM.unAuthFile]: {
    code: 513,
    statusText: ERROR_ENUM.unAuthFile,
    message: '无权阅读该文件',
    data: null
  },
  [ERROR_ENUM.unAuthApiKey]: {
    code: 514,
    statusText: ERROR_ENUM.unAuthApiKey,
    message: 'Api Key 不合法',
    data: null
  }
};

View File

@@ -0,0 +1,201 @@
import { TrainingData } from '@/service/mongo';
import { pushQABill } from '@/service/common/bill/push';
import { TrainingModeEnum } from '@/constants/plugin';
import { ERROR_ENUM } from '../errorCode';
import { sendInform } from '@/pages/api/user/inform/send';
import { authBalanceByUid } from '../utils/auth';
import { axiosConfig, getAIChatApi } from '../lib/openai';
import { ChatCompletionRequestMessage } from 'openai';
import { addLog } from '../utils/tools';
import { splitText2Chunks } from '@/utils/file';
import { replaceVariable } from '@/utils/common/tools/text';
import { Prompt_AgentQA } from '@/prompts/core/agent';
import { pushDataToKb } from '@/pages/api/core/dataset/data/pushData';
// Release one slot in the QA worker pool, never going below zero.
const reduceQueue = () => {
  global.qaQueueLen = Math.max(global.qaQueueLen - 1, 0);
};
/**
 * QA generation worker. Claims one pending `qa`-mode training record, asks the
 * LLM to split its text into Q/A pairs, pushes the pairs back as index-mode
 * training data, then bills the user. Re-invokes itself until the queue drains.
 */
export async function generateQA(): Promise<any> {
  // Respect the configured concurrency cap.
  if (global.qaQueueLen >= global.systemEnv.qaMaxProcess) return;
  global.qaQueueLen++;

  let trainingId = '';
  let userId = '';

  try {
    // Atomically claim one task: a lockTime older than 4 minutes means the
    // task is free (or its previous worker died); setting it now locks it.
    const data = await TrainingData.findOneAndUpdate(
      {
        mode: TrainingModeEnum.qa,
        lockTime: { $lte: new Date(Date.now() - 4 * 60 * 1000) }
      },
      {
        lockTime: new Date()
      }
    ).select({
      _id: 1,
      userId: 1,
      kbId: 1,
      prompt: 1,
      q: 1,
      source: 1,
      file_id: 1,
      billId: 1
    });

    // task preemption
    if (!data) {
      reduceQueue();
      global.qaQueueLen <= 0 && console.log(`【QA】任务完成`);
      return;
    }

    trainingId = data._id;
    userId = String(data.userId);
    const kbId = String(data.kbId);

    // Throws when the user is out of balance — handled in the catch below.
    await authBalanceByUid(userId);

    const startTime = Date.now();

    const chatAPI = getAIChatApi();

    // request LLM to get QA
    const text = data.q;
    const messages: ChatCompletionRequestMessage[] = [
      {
        role: 'user',
        // A custom split prompt wins; otherwise use the stock agent QA prompt.
        content: data.prompt
          ? replaceVariable(data.prompt, { text })
          : replaceVariable(Prompt_AgentQA.prompt, {
              theme: Prompt_AgentQA.defaultTheme,
              text
            })
      }
    ];

    const { data: chatResponse } = await chatAPI.createChatCompletion(
      {
        model: global.qaModel.model,
        temperature: 0.01,
        messages,
        stream: false
      },
      {
        timeout: 480000,
        ...axiosConfig()
      }
    );
    const answer = chatResponse.choices?.[0].message?.content;
    const totalTokens = chatResponse.usage?.total_tokens || 0;

    const qaArr = formatSplitText(answer || ''); // formatted Q/A pairs

    // get vector and insert
    await pushDataToKb({
      kbId,
      data: qaArr.map((item) => ({
        ...item,
        source: data.source,
        file_id: data.file_id
      })),
      userId,
      mode: TrainingModeEnum.index,
      billId: data.billId
    });

    // delete data from training
    await TrainingData.findByIdAndDelete(data._id);

    console.log(`split result length: `, qaArr.length);
    console.log('生成QA成功time:', `${(Date.now() - startTime) / 1000}s`);

    // Bill only when the split actually produced pairs.
    if (qaArr.length > 0) {
      pushQABill({
        userId: data.userId,
        totalTokens,
        billId: data.billId
      });
    } else {
      addLog.info(`QA result 0:`, { answer });
    }

    reduceQueue();
    generateQA();
  } catch (err: any) {
    reduceQueue();
    // log
    if (err?.response) {
      console.log('openai error: 生成QA错误');
      console.log(err.response?.status, err.response?.statusText, err.response?.data);
    } else {
      addLog.error('生成 QA 错误', err);
    }

    // message error or openai account error
    if (err?.message === 'invalid message format') {
      await TrainingData.findByIdAndRemove(trainingId);
    }

    // Out of balance: notify the user and park ALL of their tasks far in the
    // future so they are skipped until a recharge resets the lock.
    if (userId && err === ERROR_ENUM.insufficientQuota) {
      sendInform({
        type: 'system',
        title: 'QA 任务中止',
        content:
          '由于账号余额不足,索引生成任务中止,重新充值后将会继续。暂停的任务将在 7 天后被删除。',
        userId
      });
      console.log('余额不足,暂停向量生成任务');
      await TrainingData.updateMany(
        {
          userId
        },
        {
          lockTime: new Date('2999/5/5')
        }
      );
      return generateQA();
    }

    // Transient failure: retry after a short back-off.
    setTimeout(() => {
      generateQA();
    }, 1000);
  }
}
/**
 * Parse the LLM answer into Q/A pairs.
 * Expected format: "Q1: … A1: … Q2: …". When the answer does not follow the
 * format at all, fall back to a plain chunk split so no content is lost.
 * Each result stores question + answer joined in `q`; `a` is kept empty.
 */
function formatSplitText(text: string) {
  // The model sometimes emits literal "\n" sequences; turn them into real
  // newlines (the previous comment claimed spaces — it was wrong).
  text = text.replace(/\\n/g, '\n');

  // Match one "Qn: … An: …" pair, lazily, up to the next "Q" or end of text.
  const regex = /Q\d+:(\s*)(.*)(\s*)A\d+:(\s*)([\s\S]*?)(?=Q|$)/g;
  const matches = text.matchAll(regex);

  // Fixed: was `const result = []` (implicit any[] under strict mode).
  const result: { q: string; a: string }[] = [];

  for (const match of matches) {
    const q = match[2];
    const a = match[5];
    if (q && a) {
      // Keep both halves only when the pair is complete.
      result.push({
        q: `${q}\n${a.trim().replace(/\n\s*/g, '\n')}`,
        a: ''
      });
    }
  }

  // empty result. direct split chunk
  if (result.length === 0) {
    const splitRes = splitText2Chunks({ text: text, maxLen: 500 });
    splitRes.chunks.forEach((item) => {
      result.push({
        q: item,
        a: ''
      });
    });
  }

  return result;
}

View File

@@ -0,0 +1,153 @@
import { insertData2Dataset } from '@/service/pg';
import { getVector } from '@/pages/api/openapi/plugin/vector';
import { TrainingData } from '../models/trainingData';
import { ERROR_ENUM } from '../errorCode';
import { TrainingModeEnum } from '@/constants/plugin';
import { sendInform } from '@/pages/api/user/inform/send';
import { addLog } from '../utils/tools';
// Release one slot in the vector worker pool, never going below zero.
const reduceQueue = () => {
  global.vectorQueueLen = Math.max(global.vectorQueueLen - 1, 0);
};
/* Vector generation queue. Every import kicks off its own worker loop. */
/**
 * Claims one pending index-mode training record, generates its embedding,
 * inserts the vector into pg, deletes the record, then loops until the queue
 * drains. Concurrency is capped by systemEnv.vectorMaxProcess.
 */
export async function generateVector(): Promise<any> {
  if (global.vectorQueueLen >= global.systemEnv.vectorMaxProcess) return;
  global.vectorQueueLen++;

  let trainingId = '';
  let userId = '';
  let dataItems: {
    q: string;
    a: string;
  }[] = [];

  try {
    // Atomically claim one task: a lockTime older than one minute means the
    // task is free (or its previous worker died); setting it now locks it.
    const data = await TrainingData.findOneAndUpdate(
      {
        mode: TrainingModeEnum.index,
        lockTime: { $lte: new Date(Date.now() - 1 * 60 * 1000) }
      },
      {
        lockTime: new Date()
      }
    ).select({
      _id: 1,
      userId: 1,
      kbId: 1,
      q: 1,
      a: 1,
      source: 1,
      file_id: 1,
      vectorModel: 1,
      billId: 1
    });

    // task preemption
    if (!data) {
      reduceQueue();
      global.vectorQueueLen <= 0 && console.log(`【索引】任务完成`);
      return;
    }

    trainingId = data._id;
    userId = String(data.userId);
    const kbId = String(data.kbId);

    // Strip low control characters (0x00-0x08) that break downstream storage.
    dataItems = [
      {
        q: data.q.replace(/[\x00-\x08]/g, ' '),
        a: data.a.replace(/[\x00-\x08]/g, ' ')
      }
    ];

    // Generate the embedding vectors.
    const { vectors } = await getVector({
      model: data.vectorModel,
      input: dataItems.map((item) => item.q),
      userId,
      billId: data.billId
    });

    // Insert the embeddings into pg.
    await insertData2Dataset({
      userId,
      kbId,
      data: vectors.map((vector, i) => ({
        q: dataItems[i].q,
        a: dataItems[i].a,
        source: data.source,
        file_id: data.file_id,
        vector
      }))
    });

    // delete data from training
    await TrainingData.findByIdAndDelete(data._id);
    // console.log(`生成向量成功: ${data._id}`);

    reduceQueue();
    generateVector();
  } catch (err: any) {
    reduceQueue();
    // log
    if (err?.response) {
      addLog.info('openai error: 生成向量错误', {
        status: err.response?.status,
        statusText: err.response?.statusText, // fixed log key typo: was "stateusText"
        data: err.response?.data
      });
    } else {
      addLog.error('openai error: 生成向量错误', err);
    }

    // message error or openai account error
    if (
      err?.message === 'invalid message format' ||
      err.response?.data?.error?.type === 'invalid_request_error'
    ) {
      addLog.info('invalid message format', {
        dataItems
      });
      // Park the bad record far in the future so it is skipped, not retried.
      try {
        await TrainingData.findByIdAndUpdate(trainingId, {
          lockTime: new Date('2998/5/5')
        });
      } catch (error) {}
      return generateVector();
    }

    // err vector data
    if (err?.code === 500) {
      await TrainingData.findByIdAndDelete(trainingId);
      return generateVector();
    }

    // Out of balance: notify the user and park ALL of their tasks until a
    // recharge resets the lock.
    if (userId && err === ERROR_ENUM.insufficientQuota) {
      try {
        sendInform({
          type: 'system',
          title: '索引生成任务中止',
          content:
            '由于账号余额不足,索引生成任务中止,重新充值后将会继续。暂停的任务将在 7 天后被删除。',
          userId
        });
        console.log('余额不足,暂停向量生成任务');
        await TrainingData.updateMany(
          {
            userId
          },
          {
            lockTime: new Date('2999/5/5')
          }
        );
      } catch (error) {}
      return generateVector();
    }

    // Transient failure: retry after a short back-off.
    setTimeout(() => {
      generateVector();
    }, 1000);
  }
}

View File

@@ -0,0 +1,16 @@
/**
 * Drain the global inform queue one task at a time (LIFO, single consumer).
 * Fixed: a failing task was never removed from the queue, so the catch path
 * immediately retried the same task in a tight, infinite loop. The task is
 * now removed whether it succeeds or fails.
 */
export const startSendInform = async () => {
  // Nothing queued, or another consumer is already running.
  if (global.sendInformQueue.length === 0 || global.sendInformQueueLen > 0) return;
  global.sendInformQueueLen++;

  try {
    const fn = global.sendInformQueue[global.sendInformQueue.length - 1];
    await fn();
  } catch (error) {
    // A failed inform must not kill the loop; the task is dropped below.
  } finally {
    global.sendInformQueue.pop();
    global.sendInformQueueLen--;
    startSendInform();
  }
};

View File

@@ -0,0 +1,126 @@
import mongoose, { Types } from 'mongoose';
import fs from 'fs';
import fsp from 'fs/promises';
import { ERROR_ENUM } from '../errorCode';
import type { GSFileInfoType } from '@/types/common/file';
// GridFS bucket names; currently only dataset files are stored.
enum BucketNameEnum {
  dataset = 'dataset'
}

/**
 * Thin wrapper around Mongo GridFS scoped to one bucket and one user.
 * Every read/delete operation verifies `metadata.userId` against `uid`.
 */
export class GridFSStorage {
  readonly type = 'gridfs';
  readonly bucket: `${BucketNameEnum}`;
  readonly uid: string;

  constructor(bucket: `${BucketNameEnum}`, uid: string) {
    this.bucket = bucket;
    this.uid = String(uid);
  }

  // Raw access to the `<bucket>.files` metadata collection.
  Collection() {
    return mongoose.connection.db.collection(`${this.bucket}.files`);
  }

  GridFSBucket() {
    return new mongoose.mongo.GridFSBucket(mongoose.connection.db, {
      bucketName: this.bucket
    });
  }

  /**
   * Upload a local file into GridFS and return the new file id as a string.
   * The owner id is stamped into `metadata.userId` for later auth checks.
   */
  async save({
    path,
    filename,
    metadata = {}
  }: {
    path: string;
    filename: string;
    metadata?: Record<string, any>;
  }) {
    if (!path) return Promise.reject(`filePath is empty`);
    if (!filename) return Promise.reject(`filename is empty`);

    const stats = await fsp.stat(path);
    if (!stats.isFile()) return Promise.reject(`${path} is not a file`);

    metadata.userId = this.uid;

    // create a gridfs bucket
    const bucket = this.GridFSBucket();

    const stream = bucket.openUploadStream(filename, {
      metadata,
      contentType: metadata?.contentType
    });

    // save to gridfs
    await new Promise((resolve, reject) => {
      fs.createReadStream(path)
        .pipe(stream as any)
        .on('finish', resolve)
        .on('error', reject);
    });

    return String(stream.id);
  }

  /**
   * Look up a file's info; rejects with ERROR_ENUM.unAuthFile when the file
   * does not belong to this storage's user.
   */
  async findAndAuthFile(id: string): Promise<GSFileInfoType> {
    if (!id) {
      return Promise.reject(`id is empty`);
    }

    // create a gridfs bucket
    const bucket = this.GridFSBucket();

    // check if file exists
    const files = await bucket.find({ _id: new Types.ObjectId(id) }).toArray();
    const file = files.shift();

    if (!file) {
      return Promise.reject(`file not found`);
    }

    // Ownership check: only the uploader may access the file.
    if (file.metadata?.userId !== this.uid) {
      return Promise.reject(ERROR_ENUM.unAuthFile);
    }

    return {
      id: String(file._id),
      filename: file.filename,
      contentType: file.metadata?.contentType,
      encoding: file.metadata?.encoding,
      uploadDate: file.uploadDate,
      size: file.length
    };
  }

  // Delete one file after verifying ownership.
  async delete(id: string) {
    await this.findAndAuthFile(id);
    const bucket = this.GridFSBucket();

    await bucket.delete(new Types.ObjectId(id));

    return true;
  }

  // Delete every file this user uploaded under the given knowledge-base id.
  async deleteFilesByKbId(kbId: string) {
    if (!kbId) return;
    const bucket = this.GridFSBucket();
    const files = await bucket
      .find({ ['metadata.kbId']: kbId, ['metadata.userId']: this.uid }, { projection: { _id: 1 } })
      .toArray();

    return Promise.all(files.map((file) => this.delete(String(file._id))));
  }

  // Read the whole file into one in-memory Buffer (after an ownership check).
  async download(id: string) {
    await this.findAndAuthFile(id);

    const bucket = this.GridFSBucket();

    const stream = bucket.openDownloadStream(new Types.ObjectId(id));

    const buf: Buffer = await new Promise((resolve, reject) => {
      const buffers: Buffer[] = [];
      stream.on('data', (data) => buffers.push(data));
      stream.on('error', reject);
      stream.on('end', () => resolve(Buffer.concat(buffers)));
    });

    return buf;
  }
}

View File

@@ -0,0 +1,28 @@
import { UserModelSchema } from '@/types/mongoSchema';
import { Configuration, OpenAIApi } from 'openai';
// Default OpenAI endpoint; overridable via OPENAI_BASE_URL.
export const openaiBaseUrl = process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1';
// Effective base URL: a ONEAPI gateway takes precedence when configured.
export const baseUrl = process.env.ONEAPI_URL || openaiBaseUrl;
// System-level API key used when the user has not supplied their own.
export const systemAIChatKey = process.env.CHAT_API_KEY || '';
export const getAIChatApi = (props?: UserModelSchema['openaiAccount']) => {
return new OpenAIApi(
new Configuration({
basePath: props?.baseUrl || baseUrl,
apiKey: props?.key || systemAIChatKey
})
);
};
/* openai axios config — options for calling the endpoint with plain axios. */
export const axiosConfig = (props?: UserModelSchema['openaiAccount']) => {
  const apiKey = props?.key || systemAIChatKey;

  return {
    // Only honoured by direct axios calls, not by the npm SDK client.
    baseURL: props?.baseUrl || baseUrl,
    httpsAgent: global.httpsAgent,
    headers: {
      Authorization: `Bearer ${apiKey}`,
      auth: process.env.OPENAI_BASE_URL_AUTH || ''
    }
  };
};

View File

@@ -0,0 +1,70 @@
import { Schema, model, models, Model } from 'mongoose';
import { AppSchema as AppType } from '@/types/mongoSchema';
// Application (bot) definition: metadata, sharing options and flow modules.
const AppSchema = new Schema({
  userId: {
    type: Schema.Types.ObjectId,
    ref: 'user',
    required: true
  },
  name: {
    type: String,
    required: true
  },
  type: {
    type: String,
    default: 'advanced',
    enum: ['basic', 'advanced']
  },
  avatar: {
    type: String,
    default: '/icon/logo.svg'
  },
  intro: {
    type: String,
    default: ''
  },
  updateTime: {
    type: Date,
    default: () => new Date()
  },
  share: {
    // Presumably an ordering weight in the share marketplace — confirm.
    topNum: {
      type: Number,
      default: 0
    },
    isShare: {
      type: Boolean,
      default: false
    },
    isShareDetail: {
      // share model detail info. false: just show name and intro
      type: Boolean,
      default: false
    },
    intro: {
      type: String,
      default: '',
      maxlength: 150
    },
    // Number of users who collected this app (see the collection model).
    collection: {
      type: Number,
      default: 0
    }
  },
  // Flow modules that make up the app.
  modules: {
    type: Array,
    default: []
  },
  // deprecated
  chat: Object
});

try {
  AppSchema.index({ updateTime: -1 });
  AppSchema.index({ 'share.collection': -1 });
} catch (error) {
  console.log(error);
}

export const App: Model<AppType> = models['app'] || model('app', AppSchema);

View File

@@ -0,0 +1,82 @@
import { Schema, model, models, Model } from 'mongoose';
import { ChatSchema as ChatType } from '@/types/mongoSchema';
import { ChatRoleMap, TaskResponseKeyEnum } from '@/constants/chat';
import { ChatSourceMap } from '@/constants/chat';
// One chat session (conversation) between a user and an app.
const ChatSchema = new Schema({
  chatId: {
    type: String,
    // NOTE(review): `require` is not the mongoose option (`required` is); as
    // written it has no effect. Confirm old rows before changing it.
    require: true
  },
  userId: {
    type: Schema.Types.ObjectId,
    ref: 'user',
    required: true
  },
  appId: {
    type: Schema.Types.ObjectId,
    ref: 'model',
    required: true
  },
  updateTime: {
    type: Date,
    default: () => new Date()
  },
  // Auto title; customTitle wins when the user renames the chat.
  title: {
    type: String,
    default: '历史记录'
  },
  customTitle: {
    type: String,
    default: ''
  },
  // Pinned to the top of the history list when true.
  top: {
    type: Boolean
  },
  // Module variables captured for this conversation.
  variables: {
    type: Object,
    default: {}
  },
  source: {
    type: String,
    enum: Object.keys(ChatSourceMap),
    required: true
  },
  // Present when the chat was started from a share link.
  shareId: {
    type: String
  },
  isInit: {
    type: Boolean,
    default: false
  },
  // Inline message log; entries mirror the chatItem shape.
  content: {
    type: [
      {
        obj: {
          type: String,
          required: true,
          enum: Object.keys(ChatRoleMap)
        },
        value: {
          type: String,
          default: ''
        },
        [TaskResponseKeyEnum.responseData]: {
          type: Array,
          default: []
        }
      }
    ],
    default: []
  }
});

try {
  ChatSchema.index({ userId: 1 });
  ChatSchema.index({ updateTime: -1 });
  ChatSchema.index({ appId: 1 });
} catch (error) {
  console.log(error);
}

export const Chat: Model<ChatType> = models['chat'] || model('chat', ChatSchema);

View File

@@ -0,0 +1,67 @@
import { Schema, model, models, Model } from 'mongoose';
import { ChatItemSchema as ChatItemType } from '@/types/mongoSchema';
import { ChatRoleMap, TaskResponseKeyEnum } from '@/constants/chat';
import { customAlphabet } from 'nanoid';
// Random 24-char id generator for individual messages.
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 24);

// One message within a chat, stored separately from the chat document.
const ChatItemSchema = new Schema({
  dataId: {
    type: String,
    // NOTE(review): `require` is likely a typo of `required` and has no
    // effect here — confirm before changing.
    require: true,
    default: () => nanoid()
  },
  chatId: {
    type: String,
    require: true
  },
  userId: {
    type: Schema.Types.ObjectId,
    ref: 'user',
    required: true
  },
  appId: {
    type: Schema.Types.ObjectId,
    ref: 'model',
    required: true
  },
  time: {
    type: Date,
    default: () => new Date()
  },
  // Message role; restricted to the known chat roles.
  obj: {
    type: String,
    required: true,
    enum: Object.keys(ChatRoleMap)
  },
  value: {
    type: String,
    default: ''
  },
  // End-user feedback on this answer.
  userFeedback: {
    type: String
  },
  // Admin correction pointing at a knowledge-base entry.
  adminFeedback: {
    type: {
      kbId: String,
      dataId: String,
      content: String
    }
  },
  // Per-module response metadata recorded for this answer.
  [TaskResponseKeyEnum.responseData]: {
    type: Array,
    default: []
  }
});

try {
  ChatItemSchema.index({ time: -1 });
  ChatItemSchema.index({ userId: 1 });
  ChatItemSchema.index({ appId: 1 });
  ChatItemSchema.index({ chatId: 1 });
  ChatItemSchema.index({ userFeedback: 1 });
} catch (error) {
  console.log(error);
}

export const ChatItem: Model<ChatItemType> =
  models['chatItem'] || model('chatItem', ChatItemSchema);

View File

@@ -0,0 +1,18 @@
import { Schema, model, models, Model as MongoModel } from 'mongoose';
import { CollectionSchema as CollectionType } from '@/types/mongoSchema';
// Presumably a user's bookmark of an app (see App.share.collection counter)
// — confirm against the callers.
const CollectionSchema = new Schema({
  userId: {
    type: Schema.Types.ObjectId,
    ref: 'user',
    required: true
  },
  appId: {
    type: Schema.Types.ObjectId,
    ref: 'model',
    required: true
  }
});

export const Collection: MongoModel<CollectionType> =
  models['collection'] || model('collection', CollectionSchema);

View File

@@ -0,0 +1,15 @@
import { Schema, model, models, Model } from 'mongoose';
// User-uploaded image stored directly in mongo as raw bytes.
const ImageSchema = new Schema({
  userId: {
    type: Schema.Types.ObjectId,
    ref: 'user',
    required: true
  },
  binary: {
    type: Buffer
  }
});

export const Image: Model<{ userId: string; binary: Buffer }> =
  models['image'] || model('image', ImageSchema);

View File

@@ -0,0 +1,40 @@
import { Schema, model, models, Model } from 'mongoose';
import { informSchema } from '@/types/mongoSchema';
import { InformTypeMap } from '@/constants/user';
// In-app notification sent to a user (e.g. system messages about billing).
const InformSchema = new Schema({
  userId: {
    type: Schema.Types.ObjectId,
    ref: 'user',
    required: true
  },
  time: {
    type: Date,
    default: () => new Date()
  },
  // Notification category; restricted to the known inform types.
  type: {
    type: String,
    enum: Object.keys(InformTypeMap)
  },
  title: {
    type: String,
    required: true
  },
  content: {
    type: String,
    required: true
  },
  // Whether the user has read the notification.
  read: {
    type: Boolean,
    default: false
  }
});

try {
  InformSchema.index({ time: -1 });
  InformSchema.index({ userId: 1 });
} catch (error) {
  console.log(error);
}

export const Inform: Model<informSchema> = models['inform'] || model('inform', InformSchema);

View File

@@ -0,0 +1,45 @@
import { Schema, model, models, Model } from 'mongoose';
import { kbSchema as SchemaType } from '@/types/mongoSchema';
import { KbTypeMap } from '@/constants/dataset';
// Knowledge base (dataset); may be nested in folders via parentId.
const kbSchema = new Schema({
  parentId: {
    type: Schema.Types.ObjectId,
    ref: 'kb',
    default: null
  },
  userId: {
    type: Schema.Types.ObjectId,
    ref: 'user',
    required: true
  },
  updateTime: {
    type: Date,
    default: () => new Date()
  },
  avatar: {
    type: String,
    default: '/icon/logo.svg'
  },
  name: {
    type: String,
    required: true
  },
  // Embedding model used for this knowledge base.
  vectorModel: {
    type: String,
    required: true,
    default: 'text-embedding-ada-002'
  },
  type: {
    type: String,
    enum: Object.keys(KbTypeMap),
    required: true,
    default: 'dataset'
  },
  tags: {
    type: [String],
    default: []
  }
});

export const KB: Model<SchemaType> = models['kb'] || model('kb', kbSchema);

View File

@@ -0,0 +1,29 @@
import { Schema, model, models, Model } from 'mongoose';
import { PaySchema as PayType } from '@/types/mongoSchema';
// Payment order record.
const PaySchema = new Schema({
  userId: {
    type: Schema.Types.ObjectId,
    ref: 'user',
    required: true
  },
  createTime: {
    type: Date,
    default: () => new Date()
  },
  price: {
    type: Number,
    required: true
  },
  orderId: {
    type: String,
    required: true
  },
  status: {
    // payment status
    type: String,
    default: 'NOTPAY',
    enum: ['SUCCESS', 'REFUND', 'NOTPAY', 'CLOSED']
  }
});

export const Pay: Model<PayType> = models['pay'] || model('pay', PaySchema);

View File

@@ -0,0 +1,31 @@
import { Schema, model, models, Model } from 'mongoose';
import { PromotionRecordSchema as PromotionRecordType } from '@/types/mongoSchema';
// Referral/promotion earning record: `userId` earns `amount` because of an
// action (pay/register) by `objUId`.
const PromotionRecordSchema = new Schema({
  userId: {
    type: Schema.Types.ObjectId,
    ref: 'user',
    required: true
  },
  // The referred user who triggered the reward (optional).
  objUId: {
    type: Schema.Types.ObjectId,
    ref: 'user',
    required: false
  },
  createTime: {
    type: Date,
    default: () => new Date()
  },
  type: {
    type: String,
    required: true,
    enum: ['pay', 'register']
  },
  amount: {
    type: Number,
    required: true
  }
});

export const promotionRecord: Model<PromotionRecordType> =
  models['promotionRecord'] || model('promotionRecord', PromotionRecordSchema);

View File

@@ -0,0 +1,72 @@
/* 模型的知识库 */
import { Schema, model, models, Model as MongoModel } from 'mongoose';
import { TrainingDataSchema as TrainingDateType } from '@/types/mongoSchema';
import { TrainingTypeMap } from '@/constants/plugin';
// pgList and vectorList, Only one of them will work
const TrainingDataSchema = new Schema({
userId: {
type: Schema.Types.ObjectId,
ref: 'user',
required: true
},
kbId: {
type: Schema.Types.ObjectId,
ref: 'kb',
required: true
},
expireAt: {
type: Date,
default: () => new Date()
},
lockTime: {
type: Date,
default: () => new Date('2000/1/1')
},
mode: {
type: String,
enum: Object.keys(TrainingTypeMap),
required: true
},
vectorModel: {
type: String,
required: true,
default: 'text-embedding-ada-002'
},
prompt: {
// qa split prompt
type: String,
default: ''
},
q: {
type: String,
default: ''
},
a: {
type: String,
default: ''
},
source: {
type: String,
default: ''
},
file_id: {
type: String,
default: ''
},
billId: {
type: String,
default: ''
}
});
try {
TrainingDataSchema.index({ lockTime: 1 });
TrainingDataSchema.index({ userId: 1 });
TrainingDataSchema.index({ expireAt: 1 }, { expireAfterSeconds: 7 * 24 * 60 });
} catch (error) {
console.log(error);
}
export const TrainingData: MongoModel<TrainingDateType> =
models['trainingData'] || model('trainingData', TrainingDataSchema);

View File

@@ -0,0 +1,59 @@
import { Schema, model, models, Model } from 'mongoose';
import { hashPassword } from '@/service/utils/tools';
import { PRICE_SCALE } from '@fastgpt/common/bill/constants';
import { UserModelSchema } from '@/types/mongoSchema';
// Account record for a platform user.
const UserSchema = new Schema({
  username: {
    // Phone number or email; new verification flows use phone only.
    type: String,
    required: true,
    unique: true // must be unique
  },
  password: {
    type: String,
    required: true,
    set: (val: string) => hashPassword(val),
    // NOTE(review): the getter hashes the already-hashed stored value, so
    // reads return hash(hash(raw)); the login comparison presumably does the
    // same — confirm before touching.
    get: (val: string) => hashPassword(val),
    select: false
  },
  createTime: {
    type: Date,
    default: () => new Date()
  },
  avatar: {
    type: String,
    default: '/icon/human.png'
  },
  // Initial free credit: 2 × PRICE_SCALE in the scaled price unit.
  balance: {
    type: Number,
    default: 2 * PRICE_SCALE
  },
  inviterId: {
    // The user who referred this account.
    type: Schema.Types.ObjectId,
    ref: 'user'
  },
  // Referral commission rate.
  promotionRate: {
    type: Number,
    default: 15
  },
  limit: {
    exportKbTime: {
      // Every half hour
      type: Date
    }
  },
  // User-supplied OpenAI credentials; used instead of the system key when set.
  openaiAccount: {
    type: {
      key: String,
      baseUrl: String
    }
  },
  timezone: {
    type: String,
    default: 'Asia/Shanghai'
  }
});

export const User: Model<UserModelSchema> = models['user'] || model('user', UserSchema);

View File

@@ -0,0 +1,172 @@
import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
import { ChatContextFilter } from '@/service/common/tiktoken';
import type { ChatHistoryItemResType, ChatItemType } from '@/types/chat';
import { ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/chat';
import { getAIChatApi, axiosConfig } from '@/service/lib/openai';
import type { ClassifyQuestionAgentItemType } from '@/types/app';
import { SystemInputEnum } from '@/constants/app';
import { SpecialInputKeyEnum } from '@/constants/flow';
import { FlowModuleTypeEnum } from '@/constants/flow';
import { ModuleDispatchProps } from '@/types/core/modules';
import { replaceVariable } from '@/utils/common/tools/text';
import { Prompt_CQJson } from '@/prompts/core/agent';
import { defaultCQModel } from '@/pages/api/system/getInitData';
// Inputs of the classify-question module.
type Props = ModuleDispatchProps<{
  systemPrompt?: string;
  history?: ChatItemType[];
  [SystemInputEnum.userChatInput]: string;
  [SpecialInputKeyEnum.agents]: ClassifyQuestionAgentItemType[];
}>;
// Output: `1` under the matched agent key plus the response metadata.
type CQResponse = {
  [TaskResponseKeyEnum.responseData]: ChatHistoryItemResType;
  [key: string]: any;
};

// Name of the forced function used by the function-call strategy.
const agentFunName = 'agent_user_question';
/* request openai chat: classify the question and flag the matched branch. */
export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse> => {
  const {
    moduleName,
    userOpenaiAccount,
    inputs: { agents, userChatInput }
  } = props as Props;

  if (!userChatInput) {
    return Promise.reject('Input is empty');
  }

  const cqModel = global.cqModel || defaultCQModel;

  // Prefer the function-call strategy when the model supports it.
  const { arg, tokens } = cqModel.functionCall
    ? await functionCall(props)
    : await completions(props);

  // Fall back to the first agent when the model returned an unknown type.
  const matchedAgent = agents.find((item) => item.key === arg?.type) || agents[0];

  return {
    [matchedAgent.key]: 1,
    [TaskResponseKeyEnum.responseData]: {
      moduleType: FlowModuleTypeEnum.classifyQuestion,
      moduleName,
      // The user's own key means we do not charge.
      price: userOpenaiAccount?.key ? 0 : cqModel.price * tokens,
      model: cqModel.name || '',
      tokens,
      cqList: agents,
      cqResult: matchedAgent.value
    }
  };
};
/**
 * Classify via OpenAI function-calling: the model is forced to call
 * `agent_user_question` and must return one of the agent keys in `type`.
 */
async function functionCall({
  userOpenaiAccount,
  inputs: { agents, systemPrompt, history = [], userChatInput }
}: Props) {
  const cqModel = global.cqModel;

  const messages: ChatItemType[] = [
    ...(systemPrompt
      ? [
          {
            obj: ChatRoleEnum.System,
            value: systemPrompt
          }
        ]
      : []),
    ...history,
    {
      obj: ChatRoleEnum.Human,
      value: userChatInput
    }
  ];
  // Trim the context to the model's token budget.
  const filterMessages = ChatContextFilter({
    messages,
    maxTokens: cqModel.maxToken
  });
  const adaptMessages = adaptChat2GptMessages({ messages: filterMessages, reserveId: false });

  // function body
  const agentFunction = {
    name: agentFunName,
    description: '判断用户问题的类型属于哪方面,返回对应的字段',
    parameters: {
      type: 'object',
      properties: {
        type: {
          type: 'string',
          description: agents.map((item) => `${item.value},返回:'${item.key}'`).join(''),
          enum: agents.map((item) => item.key)
        }
      },
      required: ['type']
    }
  };
  const chatAPI = getAIChatApi(userOpenaiAccount);

  const response = await chatAPI.createChatCompletion(
    {
      model: cqModel.model,
      temperature: 0,
      messages: [...adaptMessages],
      function_call: { name: agentFunName },
      functions: [agentFunction]
    },
    {
      ...axiosConfig(userOpenaiAccount)
    }
  );

  // Fixed: fallback was '' — JSON.parse('') throws when the model returns no
  // arguments; '{}' lets the caller fall back to the default agent instead.
  const arg = JSON.parse(response.data.choices?.[0]?.message?.function_call?.arguments || '{}');

  return {
    arg,
    tokens: response.data.usage?.total_tokens || 0
  };
}
async function completions({
userOpenaiAccount,
inputs: { agents, systemPrompt = '', history = [], userChatInput }
}: Props) {
const extractModel = global.extractModel;
const messages: ChatItemType[] = [
{
obj: ChatRoleEnum.Human,
value: replaceVariable(extractModel.prompt || Prompt_CQJson, {
systemPrompt,
typeList: agents.map((item) => `ID: "${item.key}", 问题类型:${item.value}`).join('\n'),
text: `${history.map((item) => `${item.obj}:${item.value}`).join('\n')}
Human:${userChatInput}`
})
}
];
const chatAPI = getAIChatApi(userOpenaiAccount);
const { data } = await chatAPI.createChatCompletion(
{
model: extractModel.model,
temperature: 0.01,
messages: adaptChat2GptMessages({ messages, reserveId: false }),
stream: false
},
{
timeout: 480000,
...axiosConfig(userOpenaiAccount)
}
);
const answer = data.choices?.[0].message?.content || '';
const totalTokens = data.usage?.total_tokens || 0;
const id = agents.find((item) => answer.includes(item.key))?.key || '';
return {
tokens: totalTokens,
arg: { type: id }
};
}

View File

@@ -0,0 +1,227 @@
import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
import { ChatContextFilter } from '@/service/common/tiktoken';
import type { ChatHistoryItemResType, ChatItemType } from '@/types/chat';
import { ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/chat';
import { getAIChatApi, axiosConfig } from '@/service/lib/openai';
import type { ContextExtractAgentItemType } from '@/types/app';
import { ContextExtractEnum } from '@/constants/flow/flowField';
import { FlowModuleTypeEnum } from '@/constants/flow';
import { ModuleDispatchProps } from '@/types/core/modules';
import { Prompt_ExtractJson } from '@/prompts/core/agent';
import { replaceVariable } from '@/utils/common/tools/text';
import { defaultExtractModel } from '@/pages/api/system/getInitData';
// Inputs for the content-extract agent node.
type Props = ModuleDispatchProps<{
  history?: ChatItemType[];
  [ContextExtractEnum.content]: string;
  [ContextExtractEnum.extractKeys]: ContextExtractAgentItemType[];
  [ContextExtractEnum.description]: string;
}>;
// Outputs: success/failed are mutually-exclusive branch flags; fields is
// the extracted object serialized as JSON.
type Response = {
  [ContextExtractEnum.success]?: boolean;
  [ContextExtractEnum.failed]?: boolean;
  [ContextExtractEnum.fields]: string;
  [TaskResponseKeyEnum.responseData]: ChatHistoryItemResType;
};
// Name of the OpenAI function used for structured extraction.
const agentFunName = 'agent_extract_data';
/**
 * Content-extract agent node: pulls structured fields out of free text.
 * Uses OpenAI function calling when supported, otherwise a JSON prompt.
 */
export async function dispatchContentExtract(props: Props): Promise<Response> {
  const {
    moduleName,
    userOpenaiAccount,
    inputs: { content, description, extractKeys }
  } = props;
  if (!content) {
    return Promise.reject('Input is empty');
  }
  const extractModel = global.extractModel || defaultExtractModel;
  const { arg, tokens } = await (async () => {
    if (extractModel.functionCall) {
      return functionCall(props);
    }
    return completions(props);
  })();
  // remove invalid key: drop hallucinated fields not in the configured schema
  for (let key in arg) {
    if (!extractKeys.find((item) => item.key === key)) {
      delete arg[key];
    }
  }
  // auth fields
  // NOTE(review): success demands a truthy value for EVERY configured key,
  // including ones not marked `required` — confirm this is intentional.
  let success = !extractKeys.find((item) => !arg[item.key]);
  // auth empty value
  if (success) {
    for (const key in arg) {
      if (arg[key] === '') {
        success = false;
        break;
      }
    }
  }
  return {
    // Branch flags are mutually exclusive; the undefined one is inert.
    [ContextExtractEnum.success]: success ? true : undefined,
    [ContextExtractEnum.failed]: success ? undefined : true,
    [ContextExtractEnum.fields]: JSON.stringify(arg),
    // Spread the extracted values so each field is addressable as an output.
    ...arg,
    [TaskResponseKeyEnum.responseData]: {
      moduleType: FlowModuleTypeEnum.contentExtract,
      moduleName,
      // No charge when the user supplies their own OpenAI key.
      price: userOpenaiAccount?.key ? 0 : extractModel.price * tokens,
      model: extractModel.name || '',
      tokens,
      extractDescription: description,
      extractResult: arg
    }
  };
}
/**
 * Extract fields via OpenAI function calling; each configured extract key
 * becomes a string property of the function schema.
 * Returns the parsed arguments object and token usage.
 */
async function functionCall({
  userOpenaiAccount,
  inputs: { history = [], content, extractKeys, description }
}: Props) {
  const extractModel = global.extractModel;
  const messages: ChatItemType[] = [
    ...history,
    {
      obj: ChatRoleEnum.Human,
      value: content
    }
  ];
  // Trim history so the prompt fits the model's context window.
  const filterMessages = ChatContextFilter({
    messages,
    maxTokens: extractModel.maxToken
  });
  const adaptMessages = adaptChat2GptMessages({ messages: filterMessages, reserveId: false });
  const properties: Record<
    string,
    {
      type: string;
      description: string;
    }
  > = {};
  extractKeys.forEach((item) => {
    properties[item.key] = {
      type: 'string',
      description: item.desc
    };
  });
  // function body
  const agentFunction = {
    name: agentFunName,
    description: `${description}\n如果内容不存在返回空字符串。`,
    parameters: {
      type: 'object',
      properties,
      required: extractKeys.filter((item) => item.required).map((item) => item.key)
    }
  };
  const chatAPI = getAIChatApi(userOpenaiAccount);
  const response = await chatAPI.createChatCompletion(
    {
      model: extractModel.model,
      temperature: 0,
      messages: [...adaptMessages],
      function_call: { name: agentFunName },
      functions: [agentFunction]
    },
    {
      ...axiosConfig(userOpenaiAccount)
    }
  );
  // Malformed or missing arguments fall back to {} rather than throwing.
  const arg: Record<string, any> = (() => {
    try {
      return JSON.parse(response.data.choices?.[0]?.message?.function_call?.arguments || '{}');
    } catch (error) {
      return {};
    }
  })();
  const tokens = response.data.usage?.total_tokens || 0;
  return {
    tokens,
    arg
  };
}
/**
 * Extract fields with a plain prompt for models without function calling.
 * The model is asked to answer with a JSON object; the first {...} span in
 * the answer is parsed.
 */
async function completions({
  userOpenaiAccount,
  inputs: { history = [], content, extractKeys, description }
}: Props) {
  const extractModel = global.extractModel;
  const messages: ChatItemType[] = [
    {
      obj: ChatRoleEnum.Human,
      value: replaceVariable(extractModel.prompt || Prompt_ExtractJson, {
        description,
        json: extractKeys
          .map(
            (item) =>
              `key="${item.key}",描述="${item.desc}"required="${
                item.required ? 'true' : 'false'
              }"`
          )
          .join('\n'),
        text: `${history.map((item) => `${item.obj}:${item.value}`).join('\n')}
Human: ${content}`
      })
    }
  ];
  const chatAPI = getAIChatApi(userOpenaiAccount);
  const { data } = await chatAPI.createChatCompletion(
    {
      model: extractModel.model,
      temperature: 0.01,
      messages: adaptChat2GptMessages({ messages, reserveId: false }),
      stream: false
    },
    {
      timeout: 480000,
      ...axiosConfig(userOpenaiAccount)
    }
  );
  const answer = data.choices?.[0].message?.content || '';
  const totalTokens = data.usage?.total_tokens || 0;
  // parse response: locate the JSON object inside the (possibly chatty) answer
  const start = answer.indexOf('{');
  const end = answer.lastIndexOf('}');
  if (start === -1 || end === -1)
    return {
      tokens: totalTokens,
      arg: {}
    };
  const rawJson = answer.substring(start, end + 1);
  const tryParse = (str: string): Record<string, any> | undefined => {
    try {
      return JSON.parse(str);
    } catch (error) {
      return undefined;
    }
  };
  // BUG FIX: the old code ALWAYS stripped every space and backslash before
  // parsing, corrupting extracted values that legitimately contain spaces or
  // escaped characters (and breaking escaped quotes in valid JSON). Parse the
  // raw JSON first; only fall back to the stripped form for malformed output.
  const arg =
    tryParse(rawJson) ?? tryParse(rawJson.replace(/(\\n|\\)/g, '').replace(/ /g, '')) ?? {};
  return {
    tokens: totalTokens,
    arg
  };
}

View File

@@ -0,0 +1,405 @@
import type { NextApiResponse } from 'next';
import { ChatContextFilter } from '@/service/common/tiktoken';
import type { ChatItemType, QuoteItemType } from '@/types/chat';
import type { ChatHistoryItemResType } from '@/types/chat';
import { ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat';
import { SSEParseData, parseStreamChunk } from '@/utils/sse';
import { textAdaptGptResponse } from '@/utils/adapt';
import { getAIChatApi, axiosConfig } from '@/service/lib/openai';
import { TaskResponseKeyEnum } from '@/constants/chat';
import { getChatModel } from '@/service/utils/data';
import { countModelPrice } from '@/service/common/bill/push';
import { ChatModelItemType } from '@/types/model';
import { textCensor } from '@/api/service/plugins';
import { ChatCompletionRequestMessageRoleEnum } from 'openai';
import { AppModuleItemType } from '@/types/app';
import { countMessagesTokens, sliceMessagesTB } from '@/utils/common/tiktoken';
import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
import { defaultQuotePrompt, defaultQuoteTemplate } from '@/prompts/core/AIChat';
import type { AIChatProps } from '@/types/core/aiChat';
import { replaceVariable } from '@/utils/common/tools/text';
import { FlowModuleTypeEnum } from '@/constants/flow';
import { ModuleDispatchProps } from '@/types/core/modules';
import { Readable } from 'stream';
import { responseWrite, responseWriteController } from '@/service/common/stream';
import { addLog } from '@/service/utils/tools';
// Inputs for the AI-chat node: model settings plus runtime chat context.
export type ChatProps = ModuleDispatchProps<
  AIChatProps & {
    userChatInput: string;
    history?: ChatItemType[];
    quoteQA?: QuoteItemType[];
    limitPrompt?: string;
  }
>;
// Output: the generated answer text plus billing/response metadata.
export type ChatResponse = {
  [TaskResponseKeyEnum.answerText]: string;
  [TaskResponseKeyEnum.responseData]: ChatHistoryItemResType;
  finish: boolean;
};
/**
 * AI-chat node: builds the prompt (system + quotes + history + limit prompt
 * + question), calls the chat completion API, and returns the answer plus
 * billing metadata. Supports SSE streaming and plain JSON responses.
 */
export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResponse> => {
  let {
    res,
    moduleName,
    stream = false,
    detail = false,
    userOpenaiAccount,
    outputs,
    inputs: {
      model = global.chatModels[0]?.model,
      temperature = 0,
      maxToken = 4000,
      history = [],
      quoteQA = [],
      userChatInput,
      systemPrompt = '',
      limitPrompt,
      quoteTemplate,
      quotePrompt
    }
  } = props;
  if (!userChatInput) {
    return Promise.reject('Question is empty');
  }
  // temperature adapt
  const modelConstantsData = getChatModel(model);
  if (!modelConstantsData) {
    return Promise.reject('The chat model is undefined, you need to select a chat model.');
  }
  // Render quotes, dropping those beyond the model's quote token budget.
  const { filterQuoteQA, quoteText } = filterQuote({
    quoteQA,
    model: modelConstantsData,
    quoteTemplate
  });
  // Optional content moderation before the prompt is sent upstream.
  if (modelConstantsData.censor) {
    await textCensor({
      text: `${systemPrompt}
      ${quoteText}
      ${userChatInput}
      `
    });
  }
  const { messages, filterMessages } = getChatMessages({
    model: modelConstantsData,
    history,
    quoteText,
    quotePrompt,
    userChatInput,
    systemPrompt,
    limitPrompt
  });
  // Shrink the completion budget so prompt + completion fits the context window.
  const { max_tokens } = getMaxTokens({
    model: modelConstantsData,
    maxToken,
    filterMessages
  });
  // console.log(messages);
  // FastGPT temperature range: 1~10 — rescale to the model's native range.
  temperature = +(modelConstantsData.maxTemperature * (temperature / 10)).toFixed(2);
  temperature = Math.max(temperature, 0.01);
  const chatAPI = getAIChatApi(userOpenaiAccount);
  const response = await chatAPI.createChatCompletion(
    {
      model,
      temperature,
      max_tokens,
      messages: [
        ...(modelConstantsData.defaultSystem
          ? [
              {
                role: ChatCompletionRequestMessageRoleEnum.System,
                content: modelConstantsData.defaultSystem
              }
            ]
          : []),
        ...messages
      ],
      stream
    },
    {
      timeout: 480000,
      responseType: stream ? 'stream' : 'json',
      ...axiosConfig(userOpenaiAccount)
    }
  );
  const { answerText, totalTokens, completeMessages } = await (async () => {
    if (stream) {
      // sse response
      const { answer } = await streamResponse({
        res,
        detail,
        response
      });
      // count tokens
      // Streaming responses carry no usage field, so tokens are counted locally.
      const completeMessages = filterMessages.concat({
        obj: ChatRoleEnum.AI,
        value: answer
      });
      const totalTokens = countMessagesTokens({
        messages: completeMessages
      });
      targetResponse({ res, detail, outputs });
      return {
        answerText: answer,
        totalTokens,
        completeMessages
      };
    } else {
      const answer = response.data.choices?.[0].message?.content || '';
      const totalTokens = response.data.usage?.total_tokens || 0;
      const completeMessages = filterMessages.concat({
        obj: ChatRoleEnum.AI,
        value: answer
      });
      return {
        answerText: answer,
        totalTokens,
        completeMessages
      };
    }
  })();
  return {
    [TaskResponseKeyEnum.answerText]: answerText,
    [TaskResponseKeyEnum.responseData]: {
      moduleType: FlowModuleTypeEnum.chatNode,
      moduleName,
      // No charge when the user supplies their own OpenAI key.
      price: userOpenaiAccount?.key ? 0 : countModelPrice({ model, tokens: totalTokens }),
      model: modelConstantsData.name,
      tokens: totalTokens,
      question: userChatInput,
      maxToken: max_tokens,
      quoteList: filterQuoteQA,
      historyPreview: getHistoryPreview(completeMessages)
    },
    finish: true
  };
};
function filterQuote({
quoteQA = [],
model,
quoteTemplate
}: {
quoteQA: ChatProps['inputs']['quoteQA'];
model: ChatModelItemType;
quoteTemplate?: string;
}) {
const sliceResult = sliceMessagesTB({
maxTokens: model.quoteMaxToken,
messages: quoteQA.map((item, index) => ({
obj: ChatRoleEnum.System,
value: replaceVariable(quoteTemplate || defaultQuoteTemplate, {
...item,
index: `${index + 1}`
})
}))
});
// slice filterSearch
const filterQuoteQA = quoteQA.slice(0, sliceResult.length);
const quoteText =
filterQuoteQA.length > 0
? `${filterQuoteQA
.map((item, index) =>
replaceVariable(quoteTemplate || defaultQuoteTemplate, {
...item,
index: `${index + 1}`
})
)
.join('\n')}`
: '';
return {
filterQuoteQA,
quoteText
};
}
/**
 * Assemble the final prompt: system prompt, history, optional limit prompt,
 * then the user question (wrapped with the quote prompt when quotes exist).
 * Returns both the GPT-format messages and the filtered ChatItemType list.
 */
function getChatMessages({
  quotePrompt,
  quoteText,
  history = [],
  systemPrompt,
  limitPrompt,
  userChatInput,
  model
}: {
  quotePrompt?: string;
  quoteText: string;
  history: ChatProps['inputs']['history'];
  systemPrompt: string;
  limitPrompt?: string;
  userChatInput: string;
  model: ChatModelItemType;
}) {
  // With quotes present, embed quote text + question into the quote prompt.
  const question = quoteText
    ? replaceVariable(quotePrompt || defaultQuotePrompt, {
        quote: quoteText,
        question: userChatInput
      })
    : userChatInput;
  const messages: ChatItemType[] = [
    ...(systemPrompt
      ? [
          {
            obj: ChatRoleEnum.System,
            value: systemPrompt
          }
        ]
      : []),
    ...history,
    ...(limitPrompt
      ? [
          {
            obj: ChatRoleEnum.System,
            value: limitPrompt
          }
        ]
      : []),
    {
      obj: ChatRoleEnum.Human,
      value: question
    }
  ];
  // Reserve ~300 tokens of headroom for the model's answer.
  const filterMessages = ChatContextFilter({
    messages,
    maxTokens: Math.ceil(model.contextMaxToken - 300) // filter token. not response maxToken
  });
  const adaptMessages = adaptChat2GptMessages({ messages: filterMessages, reserveId: false });
  return {
    messages: adaptMessages,
    filterMessages
  };
}
/**
 * Cap the requested completion budget so prompt + completion always fits
 * within the model's context window.
 */
function getMaxTokens({
  maxToken,
  model,
  filterMessages = []
}: {
  maxToken: number;
  model: ChatModelItemType;
  filterMessages: ChatProps['inputs']['history'];
}) {
  const contextLimit = model.contextMaxToken;
  /* count response max token */
  const promptTokens = countMessagesTokens({
    messages: filterMessages
  });
  const overflows = maxToken + promptTokens > contextLimit;
  return {
    max_tokens: overflows ? contextLimit - promptTokens : maxToken
  };
}
/**
 * After streaming finishes, emit a trailing newline — but only when the
 * answer output is actually wired to at least one downstream module.
 */
function targetResponse({
  res,
  outputs,
  detail
}: {
  res: NextApiResponse;
  outputs: AppModuleItemType['outputs'];
  detail: boolean;
}) {
  const answerOutput = outputs.find((output) => output.key === TaskResponseKeyEnum.answerText);
  const targets = answerOutput?.targets || [];
  if (targets.length === 0) return;
  responseWrite({
    res,
    event: detail ? sseResponseEventEnum.answer : undefined,
    data: textAdaptGptResponse({
      text: '\n'
    })
  });
}
/**
 * Forward an upstream OpenAI SSE stream to the client chunk by chunk.
 * Resolves with the accumulated answer when the upstream stream ends,
 * closes, or the client disconnects; rejects only on stream errors.
 */
async function streamResponse({
  res,
  detail,
  response
}: {
  res: NextApiResponse;
  detail: boolean;
  response: any;
}) {
  return new Promise<{ answer: string }>((resolve, reject) => {
    const stream = response.data as Readable;
    let answer = '';
    const parseData = new SSEParseData();
    const write = responseWriteController({
      res,
      readStream: stream
    });
    stream.on('data', (data) => {
      // Client went away: stop reading upstream and keep what we have so far.
      if (res.closed) {
        stream.destroy();
        return resolve({ answer });
      }
      const parse = parseStreamChunk(data);
      parse.forEach((item) => {
        const { data } = parseData.parse(item);
        if (!data || data === '[DONE]') return;
        const content: string = data?.choices?.[0]?.delta?.content || '';
        if (data.error) {
          // Upstream error events are logged but do not abort the stream.
          addLog.error(`SSE response`, data.error);
        } else {
          answer += content;
          responseWrite({
            write,
            event: detail ? sseResponseEventEnum.answer : undefined,
            data: textAdaptGptResponse({
              text: content
            })
          });
        }
      });
    });
    stream.on('end', () => {
      resolve({ answer });
    });
    stream.on('close', () => {
      resolve({ answer });
    });
    stream.on('error', (err) => {
      reject(err);
    });
  });
}
/**
 * Build a compact copy of the conversation for the response detail view:
 * system prompts and the final two messages are kept intact; every other
 * message is truncated to its first 15 characters.
 */
function getHistoryPreview(completeMessages: ChatItemType[]) {
  const lastFullIndex = completeMessages.length - 2;
  return completeMessages.map((item, index) => {
    if (item.obj === ChatRoleEnum.System || index >= lastFullIndex) {
      return item;
    }
    const truncated = item.value.length > 15 ? `${item.value.slice(0, 15)}...` : item.value;
    return { ...item, value: truncated };
  });
}

View File

@@ -0,0 +1,8 @@
export * from './init/history';
export * from './init/userChatInput';
export * from './chat/oneapi';
export * from './kb/search';
export * from './tools/answer';
export * from './tools/http';
export * from './agent/classifyQuestion';
export * from './agent/extract';

View File

@@ -0,0 +1,18 @@
import { SystemInputEnum } from '@/constants/app';
import { ChatItemType } from '@/types/chat';
import type { ModuleDispatchProps } from '@/types/core/modules';
// History node inputs: maxContext caps how many recent messages are kept.
export type HistoryProps = ModuleDispatchProps<{
  maxContext: number;
  [SystemInputEnum.history]: ChatItemType[];
}>;
/**
 * History node: return at most the latest `maxContext` messages.
 * A maxContext of 0 (or less) disables history entirely.
 */
export const dispatchHistory = (props: Record<string, any>) => {
  const { maxContext = 5, history = [] } = props.inputs;
  const trimmed = maxContext > 0 ? history.slice(-maxContext) : [];
  return { history: trimmed };
};

View File

@@ -0,0 +1,15 @@
import { SystemInputEnum } from '@/constants/app';
import type { ModuleDispatchProps } from '@/types/core/modules';
// Entry node payload: the raw question typed by the user.
export type UserChatInputProps = ModuleDispatchProps<{
  [SystemInputEnum.userChatInput]: string;
}>;
/**
 * Entry node: pass the user's chat input straight through to downstream
 * modules as the `userChatInput` output.
 */
export const dispatchChatInput = (props: Record<string, any>) => {
  const userChatInput: string = props.inputs.userChatInput;
  return { userChatInput };
};

View File

@@ -0,0 +1,74 @@
import { PgClient } from '@/service/pg';
import type { ChatHistoryItemResType } from '@/types/chat';
import { TaskResponseKeyEnum } from '@/constants/chat';
import { getVector } from '@/pages/api/openapi/plugin/vector';
import { countModelPrice } from '@/service/common/bill/push';
import type { SelectedDatasetType } from '@/types/core/dataset';
import type { QuoteItemType } from '@/types/chat';
import { PgDatasetTableName } from '@/constants/plugin';
import { FlowModuleTypeEnum } from '@/constants/flow';
import { ModuleDispatchProps } from '@/types/core/modules';
// Inputs for the knowledge-base search node.
type KBSearchProps = ModuleDispatchProps<{
  kbList: SelectedDatasetType;
  similarity: number;
  limit: number;
  userChatInput: string;
}>;
// isEmpty/unEmpty are mutually-exclusive branch flags for the flow engine.
export type KBSearchResponse = {
  [TaskResponseKeyEnum.responseData]: ChatHistoryItemResType;
  isEmpty?: boolean;
  unEmpty?: boolean;
  quoteQA: QuoteItemType[];
};
/**
 * Knowledge-base search node: embeds the user question and runs a pgvector
 * similarity search across the selected datasets.
 */
export async function dispatchKBSearch(props: Record<string, any>): Promise<KBSearchResponse> {
  const {
    moduleName,
    inputs: { kbList = [], similarity = 0.4, limit = 5, userChatInput }
  } = props as KBSearchProps;
  if (kbList.length === 0) {
    return Promise.reject("You didn't choose the knowledge base");
  }
  if (!userChatInput) {
    return Promise.reject('Your input is empty');
  }
  // get vector
  // All selected datasets use the first dataset's vector model.
  const vectorModel = kbList[0]?.vectorModel || global.vectorModels[0];
  const { vectors, tokenLen } = await getVector({
    model: vectorModel.model,
    input: [userChatInput]
  });
  // search kb
  // NOTE(review): kbId/similarity/limit/vector values are interpolated
  // directly into the SQL string. They appear to come from module config,
  // not free user text, but parameterized queries would be safer — confirm.
  const res: any = await PgClient.query(
    `BEGIN;
    SET LOCAL ivfflat.probes = ${global.systemEnv.pgIvfflatProbe || 10};
    select kb_id,id,q,a,source,file_id from ${PgDatasetTableName} where kb_id IN (${kbList
      .map((item) => `'${item.kbId}'`)
      .join(',')}) AND vector <#> '[${vectors[0]}]' < -${similarity} order by vector <#> '[${
      vectors[0]
    }]' limit ${limit};
    COMMIT;`
  );
  // Index 2 holds the SELECT result of the 4-statement transaction.
  const searchRes: QuoteItemType[] = res?.[2]?.rows || [];
  return {
    isEmpty: searchRes.length === 0 ? true : undefined,
    unEmpty: searchRes.length > 0 ? true : undefined,
    quoteQA: searchRes,
    responseData: {
      moduleType: FlowModuleTypeEnum.kbSearchNode,
      moduleName,
      price: countModelPrice({ model: vectorModel.model, tokens: tokenLen }),
      model: vectorModel.name,
      tokens: tokenLen,
      similarity,
      limit
    }
  };
}

View File

@@ -0,0 +1,36 @@
import { sseResponseEventEnum, TaskResponseKeyEnum } from '@/constants/chat';
import { sseResponse } from '@/service/utils/tools';
import { textAdaptGptResponse } from '@/utils/adapt';
import type { ModuleDispatchProps } from '@/types/core/modules';
// Inputs: the fixed text configured on the answer node.
export type AnswerProps = ModuleDispatchProps<{
  text: string;
}>;
// Output: the configured text echoed back plus the finish flag.
export type AnswerResponse = {
  [TaskResponseKeyEnum.answerText]: string;
  finish: boolean;
};
/**
 * Answer node: emit a fixed text answer. When streaming, the text is also
 * pushed to the client over SSE with literal "\n" rendered as newlines.
 */
export const dispatchAnswer = (props: Record<string, any>): AnswerResponse => {
  const { res, detail, stream, inputs } = props as AnswerProps;
  const { text = '' } = inputs;
  if (stream) {
    sseResponse({
      res,
      event: detail ? sseResponseEventEnum.answer : undefined,
      data: textAdaptGptResponse({
        text: text.replace(/\\n/g, '\n')
      })
    });
  }
  return {
    [TaskResponseKeyEnum.answerText]: text,
    finish: true
  };
};

View File

@@ -0,0 +1,78 @@
import { TaskResponseKeyEnum } from '@/constants/chat';
import { HttpPropsEnum } from '@/constants/flow/flowField';
import { ChatHistoryItemResType } from '@/types/chat';
import { FlowModuleTypeEnum } from '@/constants/flow';
import { ModuleDispatchProps } from '@/types/core/modules';
// Inputs: the POST endpoint plus arbitrary key/value pairs sent as the body.
export type HttpRequestProps = ModuleDispatchProps<{
  [HttpPropsEnum.url]: string;
  [key: string]: any;
}>;
// finish always fires; failed additionally fires when the request throws.
export type HttpResponse = {
  [HttpPropsEnum.finish]: boolean;
  [HttpPropsEnum.failed]?: boolean;
  [TaskResponseKeyEnum.responseData]: ChatHistoryItemResType;
  [key: string]: any;
};
/**
 * HTTP node: POSTs the flow variables plus the node's inputs to an external
 * endpoint and surfaces the JSON response fields as module outputs.
 */
export const dispatchHttpRequest = async (props: Record<string, any>): Promise<HttpResponse> => {
  const {
    moduleName,
    variables,
    inputs: { url, ...body }
  } = props as HttpRequestProps;
  const requestBody = {
    variables,
    ...body
  };
  try {
    const response = await fetchData({
      url,
      body: requestBody
    });
    return {
      [HttpPropsEnum.finish]: true,
      [TaskResponseKeyEnum.responseData]: {
        moduleType: FlowModuleTypeEnum.httpRequest,
        moduleName,
        price: 0,
        body: requestBody,
        httpResult: response
      },
      // Spread the response so each field is addressable as an output.
      ...response
    };
  } catch (error) {
    // Failures trigger the `failed` branch instead of aborting the flow.
    return {
      [HttpPropsEnum.finish]: true,
      [HttpPropsEnum.failed]: true,
      [TaskResponseKeyEnum.responseData]: {
        moduleType: FlowModuleTypeEnum.httpRequest,
        moduleName,
        price: 0,
        body: requestBody,
        httpResult: { error }
      }
    };
  }
};
/**
 * POST `body` as JSON to `url` and return the parsed JSON response.
 * Throws on network failure or a non-2xx status so the caller can take
 * its `failed` branch.
 */
async function fetchData({
  url,
  body
}: {
  url: string;
  body: Record<string, any>;
}): Promise<Record<string, any>> {
  const response = await fetch(url, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json'
    },
    body: JSON.stringify(body)
  });
  // BUG FIX: the old code parsed the body unconditionally, so HTTP error
  // responses (4xx/5xx) were silently treated as a successful payload.
  if (!response.ok) {
    throw new Error(`HTTP request failed with status ${response.status}`);
  }
  return response.json();
}

View File

@@ -0,0 +1,143 @@
import mongoose from 'mongoose';
import tunnel from 'tunnel';
import { startQueue } from './utils/tools';
import { getInitConfig } from '@/pages/api/system/getInitData';
import { User } from './models/user';
import { PRICE_SCALE } from '@fastgpt/common/bill/constants';
import { initPg } from './pg';
import { createHashPassword } from '@/utils/tools';
import { createLogger, format, transports } from 'winston';
import 'winston-mongodb';
import { getTikTokenEnc } from '@/utils/common/tiktoken';
/**
 * connect MongoDB and init data
 * Idempotent: global.mongodb doubles as the connection-state flag
 * ('connecting' | connection | null), so concurrent calls return early.
 */
export async function connectToDatabase(): Promise<void> {
  if (global.mongodb) {
    return;
  }
  global.mongodb = 'connecting';
  // init global data
  global.qaQueueLen = 0;
  global.vectorQueueLen = 0;
  global.sendInformQueue = [];
  global.sendInformQueueLen = 0;
  // proxy obj
  if (process.env.AXIOS_PROXY_HOST && process.env.AXIOS_PROXY_PORT) {
    global.httpsAgent = tunnel.httpsOverHttp({
      proxy: {
        host: process.env.AXIOS_PROXY_HOST,
        port: +process.env.AXIOS_PROXY_PORT
      }
    });
  }
  // logger
  initLogger();
  // init function
  getInitConfig();
  // init tikToken
  getTikTokenEnc();
  try {
    mongoose.set('strictQuery', true);
    global.mongodb = await mongoose.connect(process.env.MONGODB_URI as string, {
      bufferCommands: true,
      maxConnecting: Number(process.env.DB_MAX_LINK || 5),
      maxPoolSize: Number(process.env.DB_MAX_LINK || 5),
      minPoolSize: 2
    });
    await initRootUser();
    initPg();
    console.log('mongo connected');
  } catch (error) {
    // Reset the flag so a later call can retry the connection.
    console.log('error->', 'mongo connect error');
    global.mongodb = null;
  }
  // init function
  startQueue();
}
/**
 * Configure the global winston logger: logs are persisted to the capped
 * `server_logs` Mongo collection and echoed to a human-readable console
 * transport (error entries also dump their meta object).
 */
function initLogger() {
  global.logger = createLogger({
    transports: [
      new transports.MongoDB({
        db: process.env.MONGODB_URI as string,
        collection: 'server_logs',
        options: {
          useUnifiedTopology: true
        },
        cappedSize: 500000000, // cap the log collection at ~500 MB
        tryReconnect: true,
        metaKey: 'meta',
        format: format.combine(format.timestamp(), format.json())
      }),
      new transports.Console({
        format: format.combine(
          format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
          format.printf((info) => {
            if (info.level === 'error') {
              console.log(info.meta);
              return `[${info.level.toLocaleUpperCase()}]: ${[info.timestamp]}: ${info.message}`;
            }
            return `[${info.level.toLocaleUpperCase()}]: ${[info.timestamp]}: ${info.message}${
              info.meta ? `: ${JSON.stringify(info.meta)}` : ''
            }`;
          })
        )
      })
    ]
  });
}
/**
 * Ensure the `root` account exists.
 * NOTE(review): on every boot the root password is reset to
 * DEFAULT_ROOT_PSW (default '123456') and the balance is topped up —
 * confirm this reset-on-start behavior is intended.
 */
async function initRootUser() {
  try {
    const rootUser = await User.findOne({
      username: 'root'
    });
    const psw = process.env.DEFAULT_ROOT_PSW || '123456';
    if (rootUser) {
      await User.findOneAndUpdate(
        { username: 'root' },
        {
          password: createHashPassword(psw),
          balance: 999999 * PRICE_SCALE
        }
      );
    } else {
      await User.create({
        username: 'root',
        password: createHashPassword(psw),
        balance: 999999 * PRICE_SCALE
      });
    }
    console.log(`root user init:`, {
      username: 'root',
      password: psw
    });
  } catch (error) {
    console.log('init root user error', error);
  }
}
export * from './models/chat';
export * from './models/chatItem';
export * from './models/app';
export * from './models/user';
export * from './common/bill/schema';
export * from './models/pay';
export * from './models/trainingData';
export * from './models/promotionRecord';
export * from './models/collection';
export * from './models/kb';
export * from './models/inform';
export * from './models/image';
export * from './support/outLink/schema';
export * from './support/openapi/schema';

View File

@@ -0,0 +1,213 @@
import { Pool } from 'pg';
import type { QueryResultRow } from 'pg';
import { PgDatasetTableName } from '@/constants/plugin';
import { addLog } from './utils/tools';
import type { DatasetDataItemType } from '@/types/core/dataset/data';
/**
 * Lazily create (and cache on `global`) the shared pg connection pool.
 * On pool-level errors the cached pool is dropped and rebuilt.
 */
export const connectPg = async (): Promise<Pool> => {
  if (global.pgClient) {
    return global.pgClient;
  }
  global.pgClient = new Pool({
    connectionString: process.env.PG_URL,
    max: Number(process.env.DB_MAX_LINK || 5),
    keepAlive: true,
    idleTimeoutMillis: 30000,
    connectionTimeoutMillis: 5000
  });
  global.pgClient.on('error', (err) => {
    console.log(err);
    global.pgClient = null;
    connectPg();
  });
  try {
    // BUG FIX: Pool.connect() checks a client OUT of the pool; the old code
    // never released it, permanently leaking one of the few pooled
    // connections. Verify connectivity, then hand the client back.
    const client = await global.pgClient.connect();
    client.release();
    console.log('pg connected');
    return global.pgClient;
  } catch (error) {
    // NOTE(review): retries immediately with no backoff or attempt cap —
    // confirm this is acceptable for a down database.
    global.pgClient = null;
    return connectPg();
  }
};
// WHERE clause items: plain strings are inserted verbatim (use them for
// 'AND'/'OR' connectors), tuples render as `field=value` with string
// values single-quoted.
type WhereProps = (string | [string, string | number])[];
type GetProps = {
  fields?: string[];
  where?: WhereProps;
  order?: { field: string; mode: 'DESC' | 'ASC' | string }[];
  limit?: number;
  offset?: number;
};
type DeleteProps = {
  where: WhereProps;
};
// One column assignment per entry.
type ValuesProps = { key: string; value?: string | number }[];
type UpdateProps = {
  values: ValuesProps;
  where: WhereProps;
};
type InsertProps = {
  values: ValuesProps[];
};
/**
 * Minimal SQL builder over the shared pg pool.
 * WARNING(review): string values are "escaped" only by rewriting single
 * quotes to double quotes — lossy and not injection-proof. Callers must
 * pass trusted field names/values; confirm before exposing to user input.
 */
class Pg {
  // Render a WHERE clause; string items are raw SQL, tuples become field=value.
  private getWhereStr(where?: WhereProps) {
    return where
      ? `WHERE ${where
          .map((item) => {
            if (typeof item === 'string') {
              return item;
            }
            const val = typeof item[1] === 'number' ? item[1] : `'${String(item[1])}'`;
            return `${item[0]}=${val}`;
          })
          .join(' ')}`
      : '';
  }
  // Render `key=value` pairs for UPDATE; string values get quote-rewritten.
  private getUpdateValStr(values: ValuesProps) {
    return values
      .map((item) => {
        const val =
          typeof item.value === 'number'
            ? item.value
            : `'${String(item.value).replace(/\'/g, '"')}'`;
        return `${item.key}=${val}`;
      })
      .join(',');
  }
  // Render `(v1,v2,...)` row tuples for a multi-row INSERT.
  private getInsertValStr(values: ValuesProps[]) {
    return values
      .map(
        (items) =>
          `(${items
            .map((item) =>
              typeof item.value === 'number'
                ? item.value
                : `'${String(item.value).replace(/\'/g, '"')}'`
            )
            .join(',')})`
      )
      .join(',');
  }
  // SELECT with optional fields/where/order; defaults: all columns, LIMIT 10, OFFSET 0.
  async select<T extends QueryResultRow = any>(table: string, props: GetProps) {
    const sql = `SELECT ${
      !props.fields || props.fields?.length === 0 ? '*' : props.fields?.join(',')
    }
    FROM ${table}
    ${this.getWhereStr(props.where)}
    ${
      props.order
        ? `ORDER BY ${props.order.map((item) => `${item.field} ${item.mode}`).join(',')}`
        : ''
    }
    LIMIT ${props.limit || 10} OFFSET ${props.offset || 0}
    `;
    const pg = await connectPg();
    return pg.query<T>(sql);
  }
  // Count rows matching `where` (counts the first listed field, else *).
  async count(table: string, props: GetProps) {
    const sql = `SELECT COUNT(${props?.fields?.[0] || '*'})
    FROM ${table}
    ${this.getWhereStr(props.where)}
    `;
    const pg = await connectPg();
    return pg.query(sql).then((res) => Number(res.rows[0]?.count || 0));
  }
  async delete(table: string, props: DeleteProps) {
    const sql = `DELETE FROM ${table} ${this.getWhereStr(props.where)}`;
    const pg = await connectPg();
    return pg.query(sql);
  }
  // No-op (rowCount 0) when there is nothing to update.
  async update(table: string, props: UpdateProps) {
    if (props.values.length === 0) {
      return {
        rowCount: 0
      };
    }
    const sql = `UPDATE ${table} SET ${this.getUpdateValStr(props.values)} ${this.getWhereStr(
      props.where
    )}`;
    const pg = await connectPg();
    return pg.query(sql);
  }
  // Multi-row INSERT; column order comes from the first row. Returns new ids.
  async insert(table: string, props: InsertProps) {
    if (props.values.length === 0) {
      return {
        rowCount: 0
      };
    }
    const fields = props.values[0].map((item) => item.key).join(',');
    const sql = `INSERT INTO ${table} (${fields}) VALUES ${this.getInsertValStr(
      props.values
    )} RETURNING id`;
    const pg = await connectPg();
    return pg.query(sql);
  }
  // Raw SQL escape hatch.
  async query<T extends QueryResultRow = any>(sql: string) {
    const pg = await connectPg();
    return pg.query<T>(sql);
  }
}
export const PgClient = new Pg();
/**
 * data insert dataset
 * Bulk-inserts Q/A pairs with their embedding vectors into the pgvector table.
 */
export const insertData2Dataset = ({
  userId,
  kbId,
  data
}: {
  userId: string;
  kbId: string;
  data: (DatasetDataItemType & {
    vector: number[];
  })[];
}) => {
  return PgClient.insert(PgDatasetTableName, {
    values: data.map((item) => [
      { key: 'user_id', value: userId },
      { key: 'kb_id', value: kbId },
      // Source label is truncated to fit the source column.
      { key: 'source', value: item.source?.slice(0, 60)?.trim() || '' },
      { key: 'file_id', value: item.file_id || '' },
      // Single quotes are rewritten to double quotes to survive SQL quoting.
      { key: 'q', value: item.q.replace(/'/g, '"') },
      { key: 'a', value: item.a.replace(/'/g, '"') },
      // pgvector literal format: '[v1,v2,...]'
      { key: 'vector', value: `[${item.vector}]` }
    ])
  });
};
/**
 * Create the pgvector extension, the dataset table and its indexes.
 * All statements are IF NOT EXISTS, so repeated boots are harmless.
 */
export async function initPg() {
  try {
    await connectPg();
    await PgClient.query(`
      CREATE EXTENSION IF NOT EXISTS vector;
      CREATE TABLE IF NOT EXISTS ${PgDatasetTableName} (
          id BIGSERIAL PRIMARY KEY,
          vector VECTOR(1536) NOT NULL,
          user_id VARCHAR(50) NOT NULL,
          kb_id VARCHAR(50),
          source VARCHAR(100),
          file_id VARCHAR(100),
          q TEXT NOT NULL,
          a TEXT
      );
      CREATE INDEX IF NOT EXISTS modelData_userId_index ON ${PgDatasetTableName} USING HASH (user_id);
      CREATE INDEX IF NOT EXISTS modelData_kbId_index ON ${PgDatasetTableName} USING HASH (kb_id);
      CREATE INDEX IF NOT EXISTS idx_model_data_md5_q_a_user_id_kb_id ON ${PgDatasetTableName} (md5(q), md5(a), user_id, kb_id);
    `);
    console.log('init pg successful');
  } catch (error) {
    addLog.error('init pg error', error);
  }
}

View File

@@ -0,0 +1,104 @@
import { sseResponseEventEnum } from '@/constants/chat';
import { NextApiResponse } from 'next';
import {
openaiError,
openaiAccountError,
proxyError,
ERROR_RESPONSE,
ERROR_ENUM
} from './errorCode';
import { clearCookie, sseResponse, addLog } from './utils/tools';
// Standard JSON API envelope.
export interface ResponseType<T = any> {
  code: number;
  message: string;
  data: T;
}
/**
 * Unified JSON responder: maps known error keys to canned responses,
 * translates upstream/openai/proxy errors to readable messages, and logs
 * error responses.
 */
export const jsonRes = <T = any>(
  res: NextApiResponse,
  props?: {
    code?: number;
    message?: string;
    data?: T;
    error?: any;
  }
) => {
  const { code = 200, message = '', data = null, error } = props || {};
  const errResponseKey = typeof error === 'string' ? error : error?.message;
  // Specified error
  if (ERROR_RESPONSE[errResponseKey]) {
    // login is expired
    if (errResponseKey === ERROR_ENUM.unAuthorization) {
      clearCookie(res);
    }
    return res.json(ERROR_RESPONSE[errResponseKey]);
  }
  // another error
  let msg = '';
  if ((code < 200 || code >= 400) && !message) {
    // Priority: explicit string > proxy error > upstream error message
    // > openai account error > openai status text > generic fallback.
    msg = error?.response?.statusText || error?.message || '请求错误';
    if (typeof error === 'string') {
      msg = error;
    } else if (proxyError[error?.code]) {
      msg = '网络连接异常';
    } else if (error?.response?.data?.error?.message) {
      msg = error?.response?.data?.error?.message;
    } else if (openaiAccountError[error?.response?.data?.error?.code]) {
      msg = openaiAccountError[error?.response?.data?.error?.code];
    } else if (openaiError[error?.response?.statusText]) {
      msg = openaiError[error.response.statusText];
    }
    addLog.error(`response error: ${msg}`, error);
  }
  res.status(code).json({
    code,
    statusText: '',
    message: message || msg,
    data: data !== undefined ? data : null
  });
};
/**
 * Push an error payload to the client over SSE (instead of an HTTP status).
 * Known error keys map to canned responses; everything else is translated
 * to a readable message and logged.
 */
export const sseErrRes = (res: NextApiResponse, error: any) => {
  const errResponseKey = typeof error === 'string' ? error : error?.message;
  const specified = ERROR_RESPONSE[errResponseKey];
  // Specified error
  if (specified) {
    // Auth expiry also clears the login cookie so the client re-authenticates.
    if (errResponseKey === ERROR_ENUM.unAuthorization) {
      clearCookie(res);
    }
    return sseResponse({
      res,
      event: sseResponseEventEnum.error,
      data: JSON.stringify(specified)
    });
  }
  // Priority: explicit string > proxy error > upstream error message
  // > openai account error > openai status text > generic fallback.
  const msg = (() => {
    if (typeof error === 'string') return error;
    if (proxyError[error?.code]) return '网络连接异常';
    if (error?.response?.data?.error?.message) return error?.response?.data?.error?.message;
    if (openaiAccountError[error?.response?.data?.error?.code]) {
      return openaiAccountError[error?.response?.data?.error?.code];
    }
    if (openaiError[error?.response?.statusText]) return openaiError[error.response.statusText];
    return error?.response?.statusText || error?.message || '请求错误';
  })();
  addLog.error(`sse error: ${msg}`, error);
  sseResponse({
    res,
    event: sseResponseEventEnum.error,
    data: JSON.stringify({ message: msg })
  });
};

View File

@@ -0,0 +1,38 @@
import { ERROR_ENUM } from '@/service/errorCode';
import { updateApiKeyUsedTime } from './index';
import { OpenApi } from './schema';
export async function authOpenApiKey({ apikey }: { apikey: string }) {
if (!apikey) {
return Promise.reject(ERROR_ENUM.unAuthApiKey);
}
try {
const openApi = await OpenApi.findOne({ apiKey: apikey });
if (!openApi) {
return Promise.reject(ERROR_ENUM.unAuthApiKey);
}
const userId = String(openApi.userId);
// auth limit
if (global.feConfigs?.isPlus) {
if (openApi?.limit?.expiredTime && openApi.limit.expiredTime.getTime() < Date.now()) {
return Promise.reject(`Key ${openApi.apiKey} is expired`);
}
if (
openApi?.limit?.credit &&
openApi.limit.credit > -1 &&
openApi.usage > openApi.limit.credit
) {
return Promise.reject(`Key ${openApi.apiKey} is over usage`);
}
}
updateApiKeyUsedTime(openApi._id);
return { apikey, userId, appId: openApi.appId };
} catch (error) {
return Promise.reject(error);
}
}

View File

@@ -0,0 +1,18 @@
import { OpenApi } from './schema';
/** Stamp the api key's last-used time. */
export async function updateApiKeyUsedTime(id: string) {
  await OpenApi.findByIdAndUpdate(id, { lastUsedTime: new Date() });
}
/** Atomically accumulate `usage` onto the key's running total. */
export async function updateApiKeyUsage({ apikey, usage }: { apikey: string; usage: number }) {
  await OpenApi.findOneAndUpdate({ apiKey: apikey }, { $inc: { usage } });
}

View File

@@ -0,0 +1,57 @@
import { Schema, model, models, Model } from 'mongoose';
// Fix: the type import must be aliased — an un-aliased `OpenApiSchema` import
// collides with the runtime `const OpenApiSchema` below (TS2440), and the
// `Model<...>` generic would then resolve to the const (a value), not the type.
import type { OpenApiSchema as OpenApiSchemaType } from '@/types/support/openapi';
import { PRICE_SCALE } from '@fastgpt/common/bill/constants';
import { formatPrice } from '@fastgpt/common/bill/index';

const OpenApiSchema = new Schema(
  {
    userId: {
      type: Schema.Types.ObjectId,
      ref: 'user',
      required: true
    },
    apiKey: {
      type: String,
      required: true,
      // Mask the key on reads: only the last 4 characters are exposed.
      get: (val: string) => `******${val.substring(val.length - 4)}`
    },
    createTime: {
      type: Date,
      default: () => new Date()
    },
    lastUsedTime: {
      type: Date
    },
    appId: {
      type: String,
      required: false
    },
    name: {
      type: String,
      default: 'Api Key'
    },
    usage: {
      // total usage. value from bill total
      type: Number,
      default: 0,
      get: (val: number) => formatPrice(val)
    },
    limit: {
      expiredTime: {
        type: Date
      },
      credit: {
        // value from user settings; -1 means unlimited
        type: Number,
        default: -1,
        // Stored scaled by PRICE_SCALE; read back as a display price.
        set: (val: number) => val * PRICE_SCALE,
        get: (val: number) => formatPrice(val)
      }
    }
  },
  {
    // Apply the getters above when documents are converted to plain objects.
    toObject: { getters: true }
  }
);

export const OpenApi: Model<OpenApiSchemaType> =
  models['openapi'] || model('openapi', OpenApiSchema);

View File

@@ -0,0 +1,79 @@
import { PRICE_SCALE } from '@fastgpt/common/bill/constants';
import { IpLimit } from '@/service/common/ipLimit/schema';
import { authBalanceByUid, AuthUserTypeEnum } from '@/service/utils/auth';
import { OutLinkSchema } from '@/types/support/outLink';
import { OutLink } from './schema';
/**
 * Authenticate a share-link chat request: resolve the link, verify the
 * owner's balance (the owner pays for the chat), then apply usage limits.
 */
export async function authOutLinkChat({ shareId, ip }: { shareId: string; ip?: string | null }) {
  const outLink = await OutLink.findOne({ shareId });
  if (!outLink) {
    return Promise.reject('分享链接无效');
  }

  const uid = String(outLink.userId);

  // The link owner's balance must be positive.
  const user = await authBalanceByUid(uid);

  // Expiry / credit / per-IP QPM checks.
  await authOutLinkLimit({ outLink, ip });

  return {
    user,
    userId: uid,
    appId: String(outLink.appId),
    authType: AuthUserTypeEnum.token,
    responseDetail: outLink.responseDetail
  };
}
/**
 * Enforce a share link's limits for one request: expiry, total credit,
 * and a per-IP per-minute request budget (QPM).
 * No-op when the caller's IP is unknown or the link has no limits configured.
 */
export async function authOutLinkLimit({
  outLink,
  ip
}: {
  outLink: OutLinkSchema;
  ip?: string | null;
}) {
  if (!ip || !outLink.limit) {
    return;
  }
  if (outLink.limit.expiredTime && outLink.limit.expiredTime.getTime() < Date.now()) {
    return Promise.reject('分享链接已过期');
  }
  // credit <= -1 means unlimited; the comparison scales credit by PRICE_SCALE,
  // so `total` is presumably stored pre-scaled — confirm against billing code.
  if (outLink.limit.credit > -1 && outLink.total > outLink.limit.credit * PRICE_SCALE) {
    return Promise.reject('链接超出使用限制');
  }
  // NOTE(review): this lookup sits outside the best-effort try below, so a DB
  // error here rejects the whole request — confirm that is intentional.
  const ipLimit = await IpLimit.findOne({ ip, eventId: outLink._id });
  try {
    // First request from this IP: open a fresh per-minute budget.
    if (!ipLimit) {
      await IpLimit.create({
        eventId: outLink._id,
        ip,
        account: outLink.limit.QPM - 1
      });
      return;
    }
    // over one minute
    const diffTime = Date.now() - ipLimit.lastMinute.getTime();
    if (diffTime >= 60 * 1000) {
      // Window elapsed: reset the budget and restart the minute.
      ipLimit.account = outLink.limit.QPM - 1;
      ipLimit.lastMinute = new Date();
      return await ipLimit.save();
    }
    // Budget exhausted within the current minute: reject with the wait time.
    // Returning a rejected promise from inside `try` is NOT intercepted by the
    // catch below (the try block completes normally via `return`), so this
    // rate-limit error does reach the caller as intended.
    if (ipLimit.account <= 0) {
      return Promise.reject(
        `每分钟仅能请求 ${outLink.limit.QPM} 次, ${60 - Math.round(diffTime / 1000)}s 后重试~`
      );
    }
    ipLimit.account = ipLimit.account - 1;
    await ipLimit.save();
    // Rate limiting is best-effort: DB errors in this block must not block
    // the chat, hence the deliberately empty catch.
  } catch (error) {}
}

View File

@@ -0,0 +1,22 @@
import { addLog } from '@/service/utils/tools';
import { OutLink } from './schema';
/** Accumulate `total` spend on a share link and stamp its last-used time. */
export const updateOutLinkUsage = async ({
  shareId,
  total
}: {
  shareId: string;
  total: number;
}) => {
  try {
    await OutLink.findOneAndUpdate({ shareId }, { $inc: { total }, lastTime: new Date() });
  } catch (err) {
    // Usage accounting is best-effort; log and carry on.
    addLog.error('update shareChat error', err);
  }
};

View File

@@ -0,0 +1,55 @@
import { Schema, model, models, Model } from 'mongoose';
import { OutLinkSchema as SchemaType } from '@/types/support/outLink';
import { OutLinkTypeEnum } from '@/constants/chat';
const OutLinkSchema = new Schema({
  // Public identifier embedded in the share URL.
  shareId: {
    type: String,
    required: true
  },
  userId: {
    type: Schema.Types.ObjectId,
    ref: 'user',
    required: true
  },
  appId: {
    type: Schema.Types.ObjectId,
    ref: 'model',
    required: true
  },
  // Kind of outbound link; defaults to a plain share link.
  type: {
    type: String,
    default: OutLinkTypeEnum.share
  },
  name: {
    type: String,
    required: true
  },
  total: {
    // total amount
    type: Number,
    default: 0
  },
  // Last time the link was used.
  lastTime: {
    type: Date
  },
  // Presumably controls whether visitors see module response details —
  // confirm against the chat UI consumers.
  responseDetail: {
    type: Boolean,
    default: false
  },
  limit: {
    // Link stops working after this time (unset = never expires).
    expiredTime: {
      type: Date
    },
    // Requests-per-minute budget (enforced per caller IP by the auth layer).
    QPM: {
      type: Number,
      default: 1000
    },
    // Spend ceiling; -1 means unlimited.
    credit: {
      type: Number,
      default: -1
    }
  }
});
// Reuse the already-compiled model across hot reloads.
export const OutLink: Model<SchemaType> = models['outlinks'] || model('outlinks', OutLinkSchema);

View File

@@ -0,0 +1,201 @@
import type { NextApiRequest } from 'next';
import Cookie from 'cookie';
import { App, OpenApi, User, KB } from '../mongo';
import type { AppSchema, UserModelSchema } from '@/types/mongoSchema';
import { ERROR_ENUM } from '../errorCode';
import { authJWT } from './tools';
import { authOpenApiKey } from '../support/openapi/auth';
// How a request was authenticated; reported back to callers of authUser.
export enum AuthUserTypeEnum {
  token = 'token', // cookie / JWT session
  root = 'root', // internal root key
  apikey = 'apikey' // open-api key or Authorization bearer key
}
/** Resolve a userId from the session cookie (or an explicit token fallback). */
export const authCookieToken = async (cookie?: string, token?: string): Promise<string> => {
  // Prefer the token inside the cookie header; fall back to the raw token.
  const parsed = Cookie.parse(cookie || '');
  const sessionToken = parsed.token || token;

  if (!sessionToken) {
    return Promise.reject(ERROR_ENUM.unAuthorization);
  }
  return authJWT(sessionToken);
};
/* auth balance */
/** Load the user and reject unless their balance is positive. */
export const authBalanceByUid = async (uid: string) => {
  // Only pull the fields downstream code needs.
  const fields = '_id username balance openaiAccount timezone';
  const user = await User.findById<UserModelSchema>(uid, fields);

  if (!user) return Promise.reject(ERROR_ENUM.unAuthorization);
  // A non-positive balance blocks any billable request.
  if (user.balance <= 0) return Promise.reject(ERROR_ENUM.insufficientQuota);

  return user;
};
/* uniform auth user */
/**
 * Uniform request authentication. Resolves the requesting user from one of:
 * cookie/token session, internal root key, apikey header, or an
 * `Authorization: Bearer` open-api key. Optionally verifies balance.
 * Returns { userId, appId, authType, user, apikey }.
 */
export const authUser = async ({
  req,
  authToken = false,
  authRoot = false,
  authBalance = false
}: {
  req: NextApiRequest;
  authToken?: boolean; // force cookie/token auth only
  authRoot?: boolean; // force root-key auth only
  authBalance?: boolean; // additionally require a positive balance
}) => {
  // Parse "Authorization: Bearer <key>[-appId]" into an api key (+ appId).
  const parseAuthorization = async (authorization?: string) => {
    if (!authorization) {
      return Promise.reject(ERROR_ENUM.unAuthorization);
    }
    // Bearer fastgpt-xxxx-appId
    const auth = authorization.split(' ')[1];
    if (!auth) {
      return Promise.reject(ERROR_ENUM.unAuthorization);
    }
    const { apikey, appId: authorizationAppid = '' } = await (async () => {
      const arr = auth.split('-');
      // abandon
      if (arr.length === 3) {
        // Legacy 3-part form: "prefix-key-appId".
        return {
          apikey: `${arr[0]}-${arr[1]}`,
          appId: arr[2]
        };
      }
      if (arr.length === 2) {
        // Current 2-part form: the whole token is the key.
        return {
          apikey: auth
        };
      }
      return Promise.reject(ERROR_ENUM.unAuthorization);
    })();
    // auth apikey
    const { userId, appId: apiKeyAppId = '' } = await authOpenApiKey({ apikey });
    return {
      uid: userId,
      apikey,
      // An appId bound to the key wins over the one embedded in the header.
      appId: apiKeyAppId || authorizationAppid
    };
  };
  // Validate the internal root key; userId is taken from the headers as-is.
  const parseRootKey = async (rootKey?: string, userId = '') => {
    if (!rootKey || !process.env.ROOT_KEY || rootKey !== process.env.ROOT_KEY) {
      return Promise.reject(ERROR_ENUM.unAuthorization);
    }
    return userId;
  };
  const { cookie, token, apikey, rootkey, userid, authorization } = (req.headers || {}) as {
    cookie?: string;
    token?: string;
    apikey?: string;
    rootkey?: string; // abandon
    userid?: string;
    authorization?: string;
  };
  let uid = '';
  let appId = '';
  let openApiKey = apikey;
  let authType: `${AuthUserTypeEnum}` = AuthUserTypeEnum.token;
  // Credential precedence: forced token > forced root > cookie/token >
  // apikey header > Authorization header > rootkey header.
  if (authToken) {
    uid = await authCookieToken(cookie, token);
    authType = AuthUserTypeEnum.token;
  } else if (authRoot) {
    uid = await parseRootKey(rootkey, userid);
    authType = AuthUserTypeEnum.root;
  } else if (cookie || token) {
    uid = await authCookieToken(cookie, token);
    authType = AuthUserTypeEnum.token;
  } else if (apikey) {
    const parseResult = await authOpenApiKey({ apikey });
    uid = parseResult.userId;
    authType = AuthUserTypeEnum.apikey;
    openApiKey = parseResult.apikey;
  } else if (authorization) {
    const authResponse = await parseAuthorization(authorization);
    uid = authResponse.uid;
    appId = authResponse.appId;
    openApiKey = authResponse.apikey;
    authType = AuthUserTypeEnum.apikey;
  } else if (rootkey) {
    uid = await parseRootKey(rootkey, userid);
    authType = AuthUserTypeEnum.root;
  } else {
    return Promise.reject(ERROR_ENUM.unAuthorization);
  }
  // balance check
  // `user` stays undefined unless authBalance was requested.
  const user = await (() => {
    if (authBalance) {
      return authBalanceByUid(uid);
    }
  })();
  return {
    userId: String(uid),
    appId,
    authType,
    user,
    apikey: openApiKey
  };
};
// 模型使用权校验
/**
 * Load an app and verify the caller may use it.
 * With authOwner or authUser set (both default true), only the owner passes.
 */
export const authApp = async ({
  appId,
  userId,
  authUser = true,
  authOwner = true,
  reserveDetail = false
}: {
  appId: string;
  userId: string;
  authUser?: boolean;
  authOwner?: boolean;
  reserveDetail?: boolean; // focus reserve detail
}) => {
  const app = await App.findById<AppSchema>(appId);
  if (!app) {
    return Promise.reject('App is not exists');
  }

  const isOwner = userId === String(app.userId);

  /*
    Access verification
    1. authOwner=true or authUser = true , just owner can use
    2. authUser = false and share, anyone can use
  */
  if ((authOwner || authUser) && !isOwner) {
    return Promise.reject(ERROR_ENUM.unAuthModel);
  }

  return {
    app,
    showModelDetail: isOwner
  };
};
// 知识库操作权限
/** Verify the user owns knowledge base `kbId`; resolves the kb document. */
export const authKb = async ({ kbId, userId }: { kbId: string; userId: string }) => {
  const kb = await KB.findOne({ _id: kbId, userId });
  if (!kb) {
    return Promise.reject(ERROR_ENUM.unAuthKb);
  }
  return kb;
};

View File

@@ -0,0 +1,93 @@
import { ChatItemType } from '@/types/chat';
import { Chat, App, ChatItem } from '@/service/mongo';
import { ChatSourceEnum } from '@/constants/chat';
// Arguments for persisting one Q/A round of a conversation.
type Props = {
  chatId: string;
  appId: string;
  userId: string;
  variables?: Record<string, any>;
  isOwner: boolean;
  source: `${ChatSourceEnum}`;
  shareId?: string;
  content: [ChatItemType, ChatItemType]; // [user question, assistant answer]
};
/**
 * Persist a chat round: always inserts both messages into ChatItem, then
 * either updates the existing Chat header (title/updateTime) or creates a
 * new one. When the owner chats online, the app's updateTime is bumped too.
 */
export async function saveChat({
  chatId,
  appId,
  userId,
  variables,
  isOwner,
  source,
  shareId,
  content
}: Props) {
  try {
    // Only need to know whether the conversation header already exists.
    const chatHistory = await Chat.findOne(
      {
        chatId,
        userId,
        appId
      },
      '_id'
    );
    const promise: any[] = [
      ChatItem.insertMany(
        content.map((item) => ({
          chatId,
          userId,
          appId,
          ...item
        }))
      )
    ];
    if (chatHistory) {
      // Existing conversation: refresh title (first 20 chars of the question)
      // and the last-updated timestamp.
      promise.push(
        Chat.updateOne(
          { chatId, userId, appId },
          {
            title: content[0].value.slice(0, 20),
            updateTime: new Date()
          }
        )
      );
    } else {
      // First round: create the conversation header.
      promise.push(
        Chat.create({
          chatId,
          userId,
          appId,
          variables,
          title: content[0].value.slice(0, 20),
          source,
          shareId
        })
      );
    }
    if (isOwner && source === ChatSourceEnum.online) {
      // Owner chatting online: surface the app as recently used.
      promise.push(
        App.findByIdAndUpdate(appId, {
          updateTime: new Date()
        })
      );
    }
    await Promise.all(promise);
  } catch (error) {
    // Best-effort fallback, deliberately not awaited: trims the legacy
    // embedded `content` array to its last 10 items. NOTE(review): the $each
    // list is empty, so nothing is pushed — presumably only the $slice
    // matters; confirm this legacy path is still needed.
    Chat.updateOne(
      { chatId, userId },
      {
        $push: {
          content: {
            $each: [],
            $slice: -10
          }
        }
      }
    );
  }
}

View File

@@ -0,0 +1,24 @@
/** Look up a chat model config by model id; undefined when unknown. */
export const getChatModel = (model?: string) =>
  global.chatModels.find((item) => item.model === model);
/** Look up a vector model config; falls back to a placeholder when unknown. */
export const getVectorModel = (model?: string) => {
  const fallback = {
    model: 'UnKnow',
    name: 'UnKnow',
    defaultToken: 500,
    price: 0,
    maxToken: 3000
  };
  return global.vectorModels.find((item) => item.model === model) || fallback;
};
/** Search every registered model kind (chat, vector, qa, extract, cq). */
export const getModel = (model?: string) => {
  const candidates = [
    ...global.chatModels,
    ...global.vectorModels,
    global.qaModel,
    global.extractModel,
    global.cqModel
  ];
  return candidates.find((item) => item.model === model);
};

View File

@@ -0,0 +1,117 @@
import type { NextApiResponse, NextApiHandler, NextApiRequest } from 'next';
import NextCors from 'nextjs-cors';
import crypto from 'crypto';
import jwt from 'jsonwebtoken';
import { generateQA } from '../events/generateQA';
import { generateVector } from '../events/generateVector';
import { ERROR_ENUM } from '../errorCode';
/* 密码加密 */
/* 密码加密 */
// SHA-256 hex digest of the password.
// NOTE(review): unsalted fast hash — consider scrypt/bcrypt upstream.
export const hashPassword = (psw: string) => {
  const hasher = crypto.createHash('sha256');
  hasher.update(psw);
  return hasher.digest('hex');
};
/* 生成 token */
/** Sign a session JWT for `userId` with a 7-day expiry. */
export const generateToken = (userId: string) => {
  const key = process.env.TOKEN_KEY as string;
  // exp is in seconds since epoch, per the JWT spec.
  const sevenDaysInSeconds = 60 * 60 * 24 * 7;
  const payload = {
    userId,
    exp: Math.floor(Date.now() / 1000) + sevenDaysInSeconds
  };
  return jwt.sign(payload, key);
};
// auth token
// Verify a session JWT and resolve the embedded userId.
export const authJWT = (token: string) =>
  new Promise<string>((resolve, reject) => {
    const key = process.env.TOKEN_KEY as string;
    jwt.verify(token, key, function (err, decoded: any) {
      // Reject on any verification failure or a payload without userId.
      if (err || !decoded?.userId) {
        return reject(ERROR_ENUM.unAuthorization);
      }
      resolve(decoded.userId);
    });
  });
/* set cookie */
/* set cookie */
// 7-day, HttpOnly, cross-site-capable session cookie.
export const setCookie = (res: NextApiResponse, token: string) => {
  const cookie = `token=${token}; Path=/; HttpOnly; Max-Age=604800; Samesite=None; Secure;`;
  res.setHeader('Set-Cookie', cookie);
};
/* clear cookie */
/* clear cookie */
// Expire the token cookie immediately.
export const clearCookie = (res: NextApiResponse) => {
  res.setHeader('Set-Cookie', 'token=; Path=/; Max-Age=0');
};
/**
 * Wrap a Next.js API handler with CORS headers for the requesting origin.
 */
export function withNextCors(handler: NextApiHandler): NextApiHandler {
  return async function nextApiHandlerWrappedWithNextCors(
    req: NextApiRequest,
    res: NextApiResponse
  ) {
    // Fix: 'eHEAD' was a typo — the allow-list must contain the real HTTP
    // method name 'HEAD', otherwise HEAD requests are never CORS-allowed.
    const methods = ['GET', 'HEAD', 'PUT', 'PATCH', 'POST', 'DELETE'];
    const origin = req.headers.origin;
    await NextCors(req, res, {
      methods,
      origin: origin,
      optionsSuccessStatus: 200
    });
    return handler(req, res);
  };
}
/* start task */
/* start task */
// Spin up the configured number of concurrent QA and vector workers.
export const startQueue = () => {
  const { qaMaxProcess, vectorMaxProcess } = global.systemEnv;
  for (let i = 0; i < qaMaxProcess; i++) {
    generateQA();
  }
  for (let i = 0; i < vectorMaxProcess; i++) {
    generateVector();
  }
};
/** Write one Server-Sent Event frame; no-op once the client disconnected. */
export const sseResponse = ({
  res,
  event,
  data
}: {
  res: NextApiResponse;
  event?: string;
  data: string;
}) => {
  if (res.closed) return;
  if (event) {
    res.write(`event: ${event}\n`);
  }
  res.write(`data: ${data}\n\n`);
};
/* add logger */
/* add logger */
// Thin wrappers over the (optional) global winston-style logger; safe no-ops
// when no logger is configured.
export const addLog = {
  info: (msg: string, obj?: Record<string, any>) => {
    global.logger?.info(msg, { meta: obj });
  },
  error: (msg: string, error?: any) => {
    // Build the metadata explicitly: stack always, axios-style config and
    // response details only when present on the error.
    const meta: Record<string, any> = { stack: error?.stack };
    if (error?.config) {
      meta.config = {
        headers: error.config.headers,
        url: error.config.url,
        data: error.config.data
      };
    }
    if (error?.response) {
      meta.response = {
        status: error.response.status,
        statusText: error.response.statusText
      };
    }
    global.logger?.error(msg, { meta });
  }
};