new framework

This commit is contained in:
archer
2023-06-09 12:57:42 +08:00
parent d9450bd7ee
commit ba9d9c3d5f
263 changed files with 12269 additions and 11599 deletions

View File

@@ -0,0 +1,122 @@
import axios, { Method, InternalAxiosRequestConfig, AxiosResponse } from 'axios';
interface ConfigType {
headers?: { [key: string]: string };
hold?: boolean;
}
interface ResponseDataType {
code: number;
message: string;
data: any;
}
/**
* Request start: attach the root key header before the request is sent.
*/
function requestStart(config: InternalAxiosRequestConfig): InternalAxiosRequestConfig {
if (config.headers) {
config.headers.rootkey = process.env.ROOT_KEY;
}
return config;
}
/**
* Response success handler; currently passes the response through unchanged.
*/
function responseSuccess(response: AxiosResponse<ResponseDataType>) {
return response;
}
/**
* Validate the response payload.
*/
function checkRes(data: ResponseDataType) {
if (data === undefined) {
return Promise.reject('服务器异常');
} else if (data.code < 200 || data.code >= 400) {
return Promise.reject(data);
}
return data.data;
}
/**
* Normalize response errors.
*/
function responseError(err: any) {
if (!err) {
return Promise.reject({ message: '未知错误' });
}
if (typeof err === 'string') {
return Promise.reject({ message: err });
}
return Promise.reject(err);
}
/* Create the request instance */
export const instance = axios.create({
timeout: 60000, // request timeout (ms)
baseURL: `http://localhost:${process.env.PORT || 3000}/api`,
headers: {
rootkey: process.env.ROOT_KEY
}
});
/* Request interceptor */
instance.interceptors.request.use(requestStart, (err) => Promise.reject(err));
/* Response interceptor */
instance.interceptors.response.use(responseSuccess, (err) => Promise.reject(err));
function request(url: string, data: any, config: ConfigType, method: Method): any {
/* strip null/undefined fields */
for (const key in data) {
if (data[key] === null || data[key] === undefined) {
delete data[key];
}
}
return instance
.request({
url,
method,
data: method === 'GET' ? null : data,
params: method === 'GET' ? data : null, // GET requests carry no body; params go on the URL
...config // user-defined config; can override the options above
})
.then((res) => checkRes(res.data))
.catch((err) => responseError(err));
}
/**
* API request helpers
* @param {String} url
* @param {Any} params
* @param {Object} config
* @returns
*/
export function GET<T = { data: any }>(
url: string,
params = {},
config: ConfigType = {}
): Promise<T> {
return request(url, params, config, 'GET');
}
export function POST<T = { data: any }>(
url: string,
data = {},
config: ConfigType = {}
): Promise<T> {
return request(url, data, config, 'POST');
}
export function PUT<T = { data: any }>(
url: string,
data = {},
config: ConfigType = {}
): Promise<T> {
return request(url, data, config, 'PUT');
}
export function DELETE<T = { data: any }>(url: string, config: ConfigType = {}): Promise<T> {
return request(url, {}, config, 'DELETE');
}
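For reference, a minimal usage sketch of these helpers. The endpoint path and response type below are illustrative assumptions, not part of this commit.
import { GET, POST } from './request';
// hypothetical endpoint and response shape, for illustration only
type KbItem = { _id: string; name: string };
export async function demoRequest() {
// GET: the second argument is serialized onto the URL as query params
const list = await GET<KbItem[]>('/plugins/kb/list', { page: 1 });
// POST: the second argument is sent as the JSON body
await POST('/demo/echo', { name: list[0]?.name || '' });
}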

View File

@@ -0,0 +1,5 @@
import { POST } from './request';
import type { TextPluginRequestParams } from '@/types/plugin';
export const sensitiveCheck = (data: TextPluginRequestParams) =>
POST('/openapi/text/sensitiveCheck', data);

View File

@@ -0,0 +1,77 @@
export const ERROR_CODE: { [key: number]: string } = {
400: '请求失败',
401: '无权访问',
403: '禁止访问',
404: '请求不存在',
405: '请求方法错误',
406: '请求的格式错误',
410: '资源已删除',
422: '验证错误',
500: '服务器发生错误',
502: '网关错误',
503: '服务器暂时过载或维护',
504: '网关超时'
};
export const TOKEN_ERROR_CODE: Record<number, string> = {
403: '登录状态无效,请重新登录'
};
export const openaiError: Record<string, string> = {
context_length_exceeded: '内容超长了,请重置对话',
Unauthorized: 'API-KEY 不合法',
rate_limit_reached: 'API 被限制,请稍后再试',
'Bad Request': 'Bad Request~ 可能内容太多了',
'Bad Gateway': '网关异常,请重试'
};
export const openaiAccountError: Record<string, string> = {
insufficient_quota: 'API 余额不足',
invalid_api_key: 'openai 账号异常',
account_deactivated: '账号已停用',
invalid_request_error: '无效请求'
};
export const proxyError: Record<string, boolean> = {
ECONNABORTED: true,
ECONNRESET: true
};
export enum ERROR_ENUM {
unAuthorization = 'unAuthorization',
insufficientQuota = 'insufficientQuota',
unAuthModel = 'unAuthModel',
unAuthKb = 'unAuthKb'
}
export const ERROR_RESPONSE: Record<
any,
{
code: number;
statusText: string;
message: string;
data?: any;
}
> = {
[ERROR_ENUM.unAuthorization]: {
code: 403,
statusText: ERROR_ENUM.unAuthorization,
message: '凭证错误',
data: null
},
[ERROR_ENUM.insufficientQuota]: {
code: 510,
statusText: ERROR_ENUM.insufficientQuota,
message: '账号余额不足',
data: null
},
[ERROR_ENUM.unAuthModel]: {
code: 511,
statusText: ERROR_ENUM.unAuthModel,
message: '无权使用该模型',
data: null
},
[ERROR_ENUM.unAuthKb]: {
code: 512,
statusText: ERROR_ENUM.unAuthKb,
message: '无权使用该知识库',
data: null
}
};
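A short sketch of how these maps are consumed (mirroring the lookup done in the response helper later in this commit); the import path assumes the snippet lives next to this module.
import { ERROR_ENUM, ERROR_RESPONSE } from './errorCode';
// service code rejects with an enum key...
const errKey = ERROR_ENUM.unAuthKb;
// ...and the API layer resolves it to the full response payload
const payload = ERROR_RESPONSE[errKey];
// -> { code: 512, statusText: 'unAuthKb', message: '无权使用该知识库', data: null }
console.log(payload.message);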

View File

@@ -0,0 +1,229 @@
import { TrainingData } from '@/service/mongo';
import { getApiKey } from '../utils/auth';
import { OpenAiChatEnum } from '@/constants/model';
import { pushSplitDataBill } from '@/service/events/pushBill';
import { openaiAccountError } from '../errorCode';
import { modelServiceToolMap } from '../utils/chat';
import { ChatRoleEnum } from '@/constants/chat';
import { BillTypeEnum } from '@/constants/user';
import { pushDataToKb } from '@/pages/api/openapi/kb/pushData';
import { TrainingModeEnum } from '@/constants/plugin';
import { ERROR_ENUM } from '../errorCode';
import { sendInform } from '@/pages/api/user/inform/send';
const reduceQueue = () => {
global.qaQueueLen = global.qaQueueLen > 0 ? global.qaQueueLen - 1 : 0;
};
export async function generateQA(): Promise<any> {
const maxProcess = Number(process.env.QA_MAX_PROCESS || 10);
if (global.qaQueueLen >= maxProcess) return;
global.qaQueueLen++;
let trainingId = '';
let userId = '';
try {
const match = {
mode: TrainingModeEnum.qa,
lockTime: { $lte: new Date(Date.now() - 4 * 60 * 1000) }
};
// random get task
const agree = await TrainingData.aggregate([
{
$match: match
},
{ $sample: { size: 1 } },
{
$project: {
_id: 1
}
}
]);
// no task
if (agree.length === 0) {
reduceQueue();
global.qaQueueLen <= 0 && console.log(`没有需要【QA】的数据, ${global.qaQueueLen}`);
return;
}
const data = await TrainingData.findOneAndUpdate(
{
_id: agree[0]._id,
...match
},
{
lockTime: new Date()
}
).select({
_id: 1,
userId: 1,
kbId: 1,
prompt: 1,
q: 1,
source: 1
});
// task preemption
if (!data) {
reduceQueue();
return generateQA();
}
trainingId = data._id;
userId = String(data.userId);
const kbId = String(data.kbId);
// check the balance and fetch an OpenAI key
const { systemAuthKey } = await getApiKey({
model: OpenAiChatEnum.GPT35,
userId,
type: 'training',
mustPay: true
});
const startTime = Date.now();
// ask ChatGPT to generate the Q/A pairs
const response = await Promise.all(
[data.q].map((text) =>
modelServiceToolMap[OpenAiChatEnum.GPT35]
.chatCompletion({
apiKey: systemAuthKey,
temperature: 0.8,
messages: [
{
obj: ChatRoleEnum.System,
value: `你是出题人
${data.prompt || '下面是"一段长文本"'}
从中选出5至20个题目和答案.答案详细.按格式返回: Q1:
A1:
Q2:
A2:
...`
},
{
obj: 'Human',
value: text
}
],
stream: false
})
.then(({ totalTokens, responseText, responseMessages }) => {
const result = formatSplitText(responseText); // parsed Q/A pairs
console.log(`split result length: `, result.length);
// billing
pushSplitDataBill({
isPay: result.length > 0,
userId: data.userId,
type: BillTypeEnum.QA,
textLen: responseMessages.map((item) => item.value).join('').length,
totalTokens
});
return {
rawContent: responseText,
result
};
})
.catch((err) => {
console.log('QA拆分错误');
console.log(err.response?.status, err.response?.statusText, err.response?.data);
return Promise.reject(err);
})
)
);
const responseList = response.map((item) => item.result).flat();
// queue the results for vector generation
await pushDataToKb({
kbId,
data: responseList.map((item) => ({
...item,
source: data.source
})),
userId,
mode: TrainingModeEnum.index
});
// delete data from training
await TrainingData.findByIdAndDelete(data._id);
console.log('生成QA成功time:', `${(Date.now() - startTime) / 1000}s`);
reduceQueue();
generateQA();
} catch (err: any) {
reduceQueue();
// log
if (err?.response) {
console.log('openai error: 生成QA错误');
console.log(err.response?.status, err.response?.statusText, err.response?.data);
} else {
console.log('生成QA错误:', err);
}
// message error or openai account error
if (
err?.message === 'invalid message format' ||
err.response?.statusText === 'Unauthorized' ||
openaiAccountError[err?.response?.data?.error?.code || err?.response?.data?.error?.type]
) {
await TrainingData.findByIdAndRemove(trainingId);
}
// insufficient account balance: pause this user's tasks
if (userId && err === ERROR_ENUM.insufficientQuota) {
sendInform({
type: 'system',
title: 'QA 任务中止',
content: '由于账号余额不足QA 任务中止,重新充值后将会继续。',
userId
});
console.log('余额不足,暂停向量生成任务');
await TrainingData.updateMany(
{
userId
},
{
lockTime: new Date('2999/5/5')
}
);
return generateQA();
}
// unlock
await TrainingData.findByIdAndUpdate(trainingId, {
lockTime: new Date('2000/1/1')
});
setTimeout(() => {
generateQA();
}, 1000);
}
}
/**
* Check whether the text follows the expected Q/A format and parse it
*/
function formatSplitText(text: string) {
const regex = /Q\d+:(\s*)(.*)(\s*)A\d+:(\s*)([\s\S]*?)(?=Q|$)/g; // regex that matches Q/A pairs
const matches = text.matchAll(regex); // all matches
const result: { q: string; a: string }[] = []; // final result
for (const match of matches) {
const q = match[2];
const a = match[5];
if (q && a) {
// keep the pair only when both Q and A are present
result.push({
q,
a: a.trim().replace(/\n\s*/g, '\n')
});
}
}
return result;
}
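For intuition, here is what formatSplitText would produce on a small made-up sample; the function is module-private, so this is only an illustration.
const sample = `Q1: What is a vector?
A1: An ordered list of numbers.
Q2: What is it used for?
A2: Similarity search.`;
console.log(formatSplitText(sample));
// [
//   { q: 'What is a vector?', a: 'An ordered list of numbers.' },
//   { q: 'What is it used for?', a: 'Similarity search.' }
// ]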

View File

@@ -0,0 +1,158 @@
import { openaiAccountError } from '../errorCode';
import { insertKbItem } from '@/service/pg';
import { openaiEmbedding } from '@/pages/api/openapi/plugin/openaiEmbedding';
import { TrainingData } from '../models/trainingData';
import { ERROR_ENUM } from '../errorCode';
import { TrainingModeEnum } from '@/constants/plugin';
import { sendInform } from '@/pages/api/user/inform/send';
const reduceQueue = () => {
global.vectorQueueLen = global.vectorQueueLen > 0 ? global.vectorQueueLen - 1 : 0;
};
/* Index-generation queue. Each import runs as its own worker. */
export async function generateVector(): Promise<any> {
const maxProcess = Number(process.env.VECTOR_MAX_PROCESS || 10);
if (global.vectorQueueLen >= maxProcess) return;
global.vectorQueueLen++;
let trainingId = '';
let userId = '';
try {
const match = {
mode: TrainingModeEnum.index,
lockTime: { $lte: new Date(Date.now() - 2 * 60 * 1000) }
};
// random get task
const agree = await TrainingData.aggregate([
{
$match: match
},
{ $sample: { size: 1 } },
{
$project: {
_id: 1
}
}
]);
// no task
if (agree.length === 0) {
reduceQueue();
global.vectorQueueLen <= 0 && console.log(`没有需要【索引】的数据, ${global.vectorQueueLen}`);
return;
}
const data = await TrainingData.findOneAndUpdate(
{
_id: agree[0]._id,
...match
},
{
lockTime: new Date()
}
).select({
_id: 1,
userId: 1,
kbId: 1,
q: 1,
a: 1,
source: 1
});
// task preemption
if (!data) {
reduceQueue();
return generateVector();
}
trainingId = data._id;
userId = String(data.userId);
const kbId = String(data.kbId);
const dataItems = [
{
q: data.q,
a: data.a
}
];
// generate embeddings
const vectors = await openaiEmbedding({
input: dataItems.map((item) => item.q),
userId,
type: 'training',
mustPay: true
});
// insert the results into pg
await insertKbItem({
userId,
kbId,
data: vectors.map((vector, i) => ({
q: dataItems[i].q,
a: dataItems[i].a,
source: data.source,
vector
}))
});
// delete data from training
await TrainingData.findByIdAndDelete(data._id);
console.log(`生成向量成功: ${data._id}`);
reduceQueue();
generateVector();
} catch (err: any) {
reduceQueue();
// log
if (err?.response) {
console.log('openai error: 生成向量错误');
console.log(err.response?.status, err.response?.statusText, err.response?.data);
} else {
console.log('生成向量错误:', err);
}
// message error or openai account error
if (
err?.message === 'invalid message format' ||
err.response?.statusText === 'Unauthorized' ||
openaiAccountError[err?.response?.data?.error?.code || err?.response?.data?.error?.type]
) {
console.log('删除一个任务');
await TrainingData.findByIdAndRemove(trainingId);
}
// insufficient account balance: pause this user's tasks
if (userId && err === ERROR_ENUM.insufficientQuota) {
sendInform({
type: 'system',
title: '索引生成任务中止',
content: '由于账号余额不足,索引生成任务中止,重新充值后将会继续。',
userId
});
console.log('余额不足,暂停向量生成任务');
await TrainingData.updateMany(
{
userId
},
{
lockTime: new Date('2999/5/5')
}
);
return generateVector();
}
// unlock
err.response?.statusText !== 'Too Many Requests' &&
(await TrainingData.findByIdAndUpdate(trainingId, {
lockTime: new Date('2000/1/1')
}));
setTimeout(() => {
generateVector();
}, 1000);
}
}

View File

@@ -0,0 +1,180 @@
import { connectToDatabase, Bill, User, ShareChat } from '../mongo';
import {
ChatModelMap,
OpenAiChatEnum,
ChatModelType,
embeddingModel,
embeddingPrice
} from '@/constants/model';
import { BillTypeEnum } from '@/constants/user';
export const pushChatBill = async ({
isPay,
chatModel,
userId,
chatId,
textLen,
tokens,
type
}: {
isPay: boolean;
chatModel: ChatModelType;
userId: string;
chatId?: '' | string;
textLen: number;
tokens: number;
type: BillTypeEnum.chat | BillTypeEnum.openapiChat;
}) => {
console.log(`chat generate success. text len: ${textLen}. token len: ${tokens}. pay:${isPay}`);
if (!isPay) return;
let billId = '';
try {
await connectToDatabase();
// compute the price
const unitPrice = ChatModelMap[chatModel]?.price || 3;
const price = unitPrice * tokens;
try {
// insert a Bill record
const res = await Bill.create({
userId,
type,
modelName: chatModel,
chatId: chatId ? chatId : undefined,
textLen,
tokenLen: tokens,
price
});
billId = res._id;
// deduct from the account balance
await User.findByIdAndUpdate(userId, {
$inc: { balance: -price }
});
} catch (error) {
console.log('创建账单失败:', error);
billId && Bill.findByIdAndDelete(billId);
}
} catch (error) {
console.log(error);
}
};
export const updateShareChatBill = async ({
shareId,
tokens
}: {
shareId: string;
tokens: number;
}) => {
try {
await ShareChat.findByIdAndUpdate(shareId, {
$inc: { tokens },
lastTime: new Date()
});
} catch (error) {
console.log('update shareChat error', error);
}
};
export const pushSplitDataBill = async ({
isPay,
userId,
totalTokens,
textLen,
type
}: {
isPay: boolean;
userId: string;
totalTokens: number;
textLen: number;
type: BillTypeEnum.QA;
}) => {
console.log(
`splitData generate success. text len: ${textLen}. token len: ${totalTokens}. pay:${isPay}`
);
if (!isPay) return;
let billId;
try {
await connectToDatabase();
// model unit price; QA splitting always uses gpt-3.5
const unitPrice = ChatModelMap[OpenAiChatEnum.GPT35].price || 3;
// compute the price
const price = unitPrice * totalTokens;
// insert a Bill record
const res = await Bill.create({
userId,
type,
modelName: OpenAiChatEnum.GPT35,
textLen,
tokenLen: totalTokens,
price
});
billId = res._id;
// deduct from the account balance
await User.findByIdAndUpdate(userId, {
$inc: { balance: -price }
});
} catch (error) {
console.log('创建账单失败:', error);
billId && Bill.findByIdAndDelete(billId);
}
};
export const pushGenerateVectorBill = async ({
isPay,
userId,
text,
tokenLen
}: {
isPay: boolean;
userId: string;
text: string;
tokenLen: number;
}) => {
// console.log(
// `vector generate success. text len: ${text.length}. token len: ${tokenLen}. pay:${isPay}`
// );
if (!isPay) return;
let billId;
try {
await connectToDatabase();
try {
// compute the price (at least 1)
let price = embeddingPrice * tokenLen;
price = price > 1 ? price : 1;
// insert a Bill record
const res = await Bill.create({
userId,
type: BillTypeEnum.vector,
modelName: embeddingModel,
textLen: text.length,
tokenLen,
price
});
billId = res._id;
// deduct from the account balance
await User.findByIdAndUpdate(userId, {
$inc: { balance: -price }
});
} catch (error) {
console.log('创建账单失败:', error);
billId && Bill.findByIdAndDelete(billId);
}
} catch (error) {
console.log(error);
}
};
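For intuition, a rough sketch of the chat-billing arithmetic above; the numbers are made up and assume the `price` field in ChatModelMap is a per-token price in the platform's scaled units.
// hypothetical values, for illustration only
const unitPrice = 3; // ChatModelMap[chatModel]?.price
const tokens = 1200; // token usage reported for the chat
const price = unitPrice * tokens; // 3600 scaled units
// the user is then charged via User.findByIdAndUpdate(userId, { $inc: { balance: -price } })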

View File

@@ -0,0 +1,26 @@
import { Schema, model, models, Model } from 'mongoose';
import { AuthCodeSchema as AuthCodeType } from '@/types/mongoSchema';
const AuthCodeSchema = new Schema({
username: {
type: String,
required: true
},
code: {
type: String,
required: true,
length: 6
},
type: {
type: String,
enum: ['register', 'findPassword'],
required: true
},
expiredTime: {
type: Number,
default: () => Date.now() + 5 * 60 * 1000
}
});
export const AuthCode: Model<AuthCodeType> =
models['auth_code'] || model('auth_code', AuthCodeSchema);

View File

@@ -0,0 +1,53 @@
import { Schema, model, models, Model } from 'mongoose';
import { ChatModelMap, embeddingModel } from '@/constants/model';
import { BillSchema as BillType } from '@/types/mongoSchema';
import { BillTypeMap } from '@/constants/user';
const BillSchema = new Schema({
userId: {
type: Schema.Types.ObjectId,
ref: 'user',
required: true
},
type: {
type: String,
enum: Object.keys(BillTypeMap),
required: true
},
modelName: {
type: String,
enum: [...Object.keys(ChatModelMap), embeddingModel],
required: true
},
chatId: {
type: Schema.Types.ObjectId,
ref: 'chat'
},
time: {
type: Date,
default: () => new Date()
},
textLen: {
// total character count of prompt + response
type: Number,
required: true
},
tokenLen: {
// equivalent token count
type: Number,
required: true
},
price: {
type: Number,
required: true
}
});
try {
BillSchema.index({ time: -1 });
BillSchema.index({ userId: 1 });
} catch (error) {
console.log(error);
}
export const Bill: Model<BillType> = models['bill'] || model('bill', BillSchema);

View File

@@ -0,0 +1,78 @@
import { Schema, model, models, Model } from 'mongoose';
import { ChatSchema as ChatType } from '@/types/mongoSchema';
import { ChatRoleMap } from '@/constants/chat';
const ChatSchema = new Schema({
userId: {
type: Schema.Types.ObjectId,
ref: 'user',
required: true
},
modelId: {
type: Schema.Types.ObjectId,
ref: 'model',
required: true
},
expiredTime: {
// expiration time (timestamp)
type: Number,
default: () => new Date()
},
loadAmount: {
// remaining load count
type: Number,
default: -1
},
updateTime: {
type: Date,
default: () => new Date()
},
title: {
type: String,
default: '历史记录'
},
customTitle: {
type: String,
default: ''
},
latestChat: {
type: String,
default: ''
},
top: {
type: Boolean
},
content: {
type: [
{
obj: {
type: String,
required: true,
enum: Object.keys(ChatRoleMap)
},
value: {
type: String,
required: true
},
quote: {
type: [
{
id: String,
q: String,
a: String,
source: String
}
],
default: []
},
systemPrompt: {
type: String,
default: ''
}
}
],
default: []
}
});
export const Chat: Model<ChatType> = models['chat'] || model('chat', ChatSchema);

View File

@@ -0,0 +1,18 @@
import { Schema, model, models, Model as MongoModel } from 'mongoose';
import { CollectionSchema as CollectionType } from '@/types/mongoSchema';
const CollectionSchema = new Schema({
userId: {
type: Schema.Types.ObjectId,
ref: 'user',
required: true
},
modelId: {
type: Schema.Types.ObjectId,
ref: 'model',
required: true
}
});
export const Collection: MongoModel<CollectionType> =
models['collection'] || model('collection', CollectionSchema);

View File

@@ -0,0 +1,40 @@
import { Schema, model, models, Model } from 'mongoose';
import { informSchema } from '@/types/mongoSchema';
import { InformTypeMap } from '@/constants/user';
const InformSchema = new Schema({
userId: {
type: Schema.Types.ObjectId,
ref: 'user',
required: true
},
time: {
type: Date,
default: () => new Date()
},
type: {
type: String,
enum: Object.keys(InformTypeMap)
},
title: {
type: String,
required: true
},
content: {
type: String,
required: true
},
read: {
type: Boolean,
default: false
}
});
try {
InformSchema.index({ time: -1 });
InformSchema.index({ userId: 1 });
} catch (error) {
console.log(error);
}
export const Inform: Model<informSchema> = models['inform'] || model('inform', InformSchema);

View File

@@ -0,0 +1,28 @@
import { Schema, model, models, Model } from 'mongoose';
import { kbSchema as SchemaType } from '@/types/mongoSchema';
const kbSchema = new Schema({
userId: {
type: Schema.Types.ObjectId,
ref: 'user',
required: true
},
updateTime: {
type: Date,
default: () => new Date()
},
avatar: {
type: String,
default: '/icon/logo.png'
},
name: {
type: String,
required: true
},
tags: {
type: [String],
default: []
}
});
export const KB: Model<SchemaType> = models['kb'] || model('kb', kbSchema);

View File

@@ -0,0 +1,92 @@
import { Schema, model, models, Model as MongoModel } from 'mongoose';
import { ModelSchema as ModelType } from '@/types/mongoSchema';
import {
ModelVectorSearchModeMap,
appVectorSearchModeEnum,
ChatModelMap,
OpenAiChatEnum
} from '@/constants/model';
const ModelSchema = new Schema({
userId: {
type: Schema.Types.ObjectId,
ref: 'user',
required: true
},
name: {
type: String,
required: true
},
avatar: {
type: String,
default: '/icon/logo.png'
},
status: {
type: String,
required: true,
enum: ['waiting', 'running', 'training', 'closed']
},
updateTime: {
type: Date,
default: () => new Date()
},
chat: {
relatedKbs: {
type: [Schema.Types.ObjectId],
ref: 'kb',
default: []
},
searchMode: {
// knowledge base search mode
type: String,
enum: Object.keys(ModelVectorSearchModeMap),
default: appVectorSearchModeEnum.hightSimilarity
},
systemPrompt: {
// system prompt
type: String,
default: ''
},
temperature: {
type: Number,
min: 0,
max: 10,
default: 0
},
chatModel: {
// model used for chat
type: String,
enum: Object.keys(ChatModelMap),
default: OpenAiChatEnum.GPT35
}
},
share: {
isShare: {
type: Boolean,
default: false
},
isShareDetail: {
// share model detail info. false: just show name and intro
type: Boolean,
default: false
},
intro: {
type: String,
default: '',
maxlength: 150
},
collection: {
type: Number,
default: 0
}
}
});
try {
ModelSchema.index({ updateTime: -1 });
ModelSchema.index({ 'share.collection': -1 });
} catch (error) {
console.log(error);
}
export const Model: MongoModel<ModelType> = models['model'] || model('model', ModelSchema);

View File

@@ -0,0 +1,23 @@
import { Schema, model, models, Model } from 'mongoose';
import { OpenApiSchema } from '@/types/mongoSchema';
const OpenApiSchema = new Schema({
userId: {
type: Schema.Types.ObjectId,
ref: 'user',
required: true
},
apiKey: {
type: String,
required: true
},
createTime: {
type: Date,
default: () => new Date()
},
lastUsedTime: {
type: Date
}
});
export const OpenApi: Model<OpenApiSchema> = models['openapi'] || model('openapi', OpenApiSchema);

View File

@@ -0,0 +1,29 @@
import { Schema, model, models, Model } from 'mongoose';
import { PaySchema as PayType } from '@/types/mongoSchema';
const PaySchema = new Schema({
userId: {
type: Schema.Types.ObjectId,
ref: 'user',
required: true
},
createTime: {
type: Date,
default: () => new Date()
},
price: {
type: Number,
required: true
},
orderId: {
type: String,
required: true
},
status: {
// payment status
type: String,
default: 'NOTPAY',
enum: ['SUCCESS', 'REFUND', 'NOTPAY', 'CLOSED']
}
});
export const Pay: Model<PayType> = models['pay'] || model('pay', PaySchema);

View File

@@ -0,0 +1,31 @@
import { Schema, model, models, Model } from 'mongoose';
import { PromotionRecordSchema as PromotionRecordType } from '@/types/mongoSchema';
const PromotionRecordSchema = new Schema({
userId: {
type: Schema.Types.ObjectId,
ref: 'user',
required: true
},
objUId: {
type: Schema.Types.ObjectId,
ref: 'user',
required: false
},
createTime: {
type: Date,
default: () => new Date()
},
type: {
type: String,
required: true,
enum: ['invite', 'shareModel', 'withdraw']
},
amount: {
type: Number,
required: true
}
});
export const promotionRecord: Model<PromotionRecordType> =
models['promotionRecord'] || model('promotionRecord', PromotionRecordSchema);

View File

@@ -0,0 +1,38 @@
import { Schema, model, models, Model } from 'mongoose';
import { ShareChatSchema as ShareChatSchemaType } from '@/types/mongoSchema';
import { hashPassword } from '@/service/utils/tools';
const ShareChatSchema = new Schema({
userId: {
type: Schema.Types.ObjectId,
ref: 'user',
required: true
},
modelId: {
type: Schema.Types.ObjectId,
ref: 'model',
required: true
},
name: {
type: String,
required: true
},
password: {
type: String,
set: (val: string) => hashPassword(val)
},
tokens: {
type: Number,
default: 0
},
maxContext: {
type: Number,
default: 20
},
lastTime: {
type: Date
}
});
export const ShareChat: Model<ShareChatSchemaType> =
models['shareChat'] || model('shareChat', ShareChatSchema);

View File

@@ -0,0 +1,48 @@
/* Training data for a model's knowledge base */
import { Schema, model, models, Model as MongoModel } from 'mongoose';
import { TrainingDataSchema as TrainingDateType } from '@/types/mongoSchema';
import { TrainingTypeMap } from '@/constants/plugin';
// pgList and vectorList, Only one of them will work
const TrainingDataSchema = new Schema({
userId: {
type: Schema.Types.ObjectId,
ref: 'user',
required: true
},
kbId: {
type: Schema.Types.ObjectId,
ref: 'kb',
required: true
},
lockTime: {
type: Date,
default: () => new Date('2000/1/1')
},
mode: {
type: String,
enum: Object.keys(TrainingTypeMap),
required: true
},
prompt: {
// prompt used when splitting
type: String,
default: ''
},
q: {
// raw text to split (QA mode) or the question to embed (index mode)
type: String,
default: ''
},
a: {
type: String,
default: ''
},
source: {
type: String,
default: ''
}
});
export const TrainingData: MongoModel<TrainingDateType> =
models['trainingData'] || model('trainingData', TrainingDataSchema);

View File

@@ -0,0 +1,57 @@
import { Schema, model, models, Model } from 'mongoose';
import { hashPassword } from '@/service/utils/tools';
import { PRICE_SCALE } from '@/constants/common';
import { UserModelSchema } from '@/types/mongoSchema';
const UserSchema = new Schema({
username: {
// may be a phone number or an email; new verification uses phone only
type: String,
required: true,
unique: true // must be unique
},
password: {
type: String,
required: true,
set: (val: string) => hashPassword(val),
get: (val: string) => hashPassword(val),
select: false
},
createTime: {
type: Date,
default: () => new Date()
},
avatar: {
type: String,
default: '/icon/human.png'
},
balance: {
// platform balance, not withdrawable
type: Number,
default: 2 * PRICE_SCALE
},
inviterId: {
// who invited this user to register
type: Schema.Types.ObjectId,
ref: 'user'
},
promotion: {
rate: {
// cashback rate
type: Number,
default: 15
}
},
openaiKey: {
type: String,
default: ''
},
limit: {
exportKbTime: {
// Every half hour
type: Date
}
}
});
export const User: Model<UserModelSchema> = models['user'] || model('user', UserSchema);

View File

@@ -0,0 +1,59 @@
import mongoose from 'mongoose';
import tunnel from 'tunnel';
import { TrainingData } from './mongo';
import { startQueue } from './utils/tools';
/**
* Connect to the MongoDB database
*/
export async function connectToDatabase(): Promise<void> {
if (global.mongodb) {
return;
}
global.mongodb = 'connecting';
try {
mongoose.set('strictQuery', true);
global.mongodb = await mongoose.connect(process.env.MONGODB_URI as string, {
bufferCommands: true,
dbName: process.env.MONGODB_NAME,
maxPoolSize: 5,
minPoolSize: 1,
maxConnecting: 5
});
console.log('mongo connected');
} catch (error) {
console.log('error->', 'mongo connect error');
global.mongodb = null;
}
// create the proxy agent
if (process.env.AXIOS_PROXY_HOST && process.env.AXIOS_PROXY_PORT) {
global.httpsAgent = tunnel.httpsOverHttp({
proxy: {
host: process.env.AXIOS_PROXY_HOST,
port: +process.env.AXIOS_PROXY_PORT
}
});
}
// initialize the queues
global.qaQueueLen = 0;
global.vectorQueueLen = 0;
startQueue();
}
export * from './models/authCode';
export * from './models/chat';
export * from './models/model';
export * from './models/user';
export * from './models/bill';
export * from './models/pay';
export * from './models/trainingData';
export * from './models/openapi';
export * from './models/promotionRecord';
export * from './models/collection';
export * from './models/shareChat';
export * from './models/kb';
export * from './models/inform';
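A minimal sketch of how an API route is expected to use this module; the route body and query handling are hypothetical.
import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase, KB } from '@/service/mongo';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
await connectToDatabase(); // ensures mongo is connected and the QA/vector queues are started
const kbs = await KB.find({ userId: String(req.query.userId || '') });
res.json({ code: 200, data: kbs });
}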

client/src/service/pg.ts
View File

@@ -0,0 +1,187 @@
import { Pool } from 'pg';
import type { QueryResultRow } from 'pg';
export const connectPg = async () => {
if (global.pgClient) {
return global.pgClient;
}
const maxLink = Number(process.env.VECTOR_MAX_PROCESS || 10);
global.pgClient = new Pool({
host: process.env.PG_HOST,
port: process.env.PG_PORT ? +process.env.PG_PORT : 5432,
user: process.env.PG_USER,
password: process.env.PG_PASSWORD,
database: process.env.PG_DB_NAME,
max: Math.floor(maxLink * 0.5),
idleTimeoutMillis: 60000,
connectionTimeoutMillis: 20000
});
global.pgClient.on('error', (err) => {
console.log(err);
global.pgClient = null;
});
try {
await global.pgClient.connect();
console.log('pg connected');
return global.pgClient;
} catch (error) {
global.pgClient = null;
return Promise.reject(error);
}
};
type WhereProps = (string | [string, string | number])[];
type GetProps = {
fields?: string[];
where?: WhereProps;
order?: { field: string; mode: 'DESC' | 'ASC' | string }[];
limit?: number;
offset?: number;
};
type DeleteProps = {
where: WhereProps;
};
type ValuesProps = { key: string; value: string | number }[];
type UpdateProps = {
values: ValuesProps;
where: WhereProps;
};
type InsertProps = {
values: ValuesProps[];
};
class Pg {
private getWhereStr(where?: WhereProps) {
return where
? `WHERE ${where
.map((item) => {
if (typeof item === 'string') {
return item;
}
const val = typeof item[1] === 'number' ? item[1] : `'${String(item[1])}'`;
return `${item[0]}=${val}`;
})
.join(' ')}`
: '';
}
private getUpdateValStr(values: ValuesProps) {
return values
.map((item) => {
const val =
typeof item.value === 'number'
? item.value
: `'${String(item.value).replace(/\'/g, '"')}'`;
return `${item.key}=${val}`;
})
.join(',');
}
private getInsertValStr(values: ValuesProps[]) {
return values
.map(
(items) =>
`(${items
.map((item) =>
typeof item.value === 'number'
? item.value
: `'${String(item.value).replace(/\'/g, '"')}'`
)
.join(',')})`
)
.join(',');
}
async select<T extends QueryResultRow = any>(table: string, props: GetProps) {
const sql = `SELECT ${
!props.fields || props.fields?.length === 0 ? '*' : props.fields?.join(',')
}
FROM ${table}
${this.getWhereStr(props.where)}
${
props.order
? `ORDER BY ${props.order.map((item) => `${item.field} ${item.mode}`).join(',')}`
: ''
}
LIMIT ${props.limit || 10} OFFSET ${props.offset || 0}
`;
const pg = await connectPg();
return pg.query<T>(sql);
}
async count(table: string, props: GetProps) {
const sql = `SELECT COUNT(${props?.fields?.[0] || '*'})
FROM ${table}
${this.getWhereStr(props.where)}
`;
const pg = await connectPg();
return pg.query(sql).then((res) => Number(res.rows[0]?.count || 0));
}
async delete(table: string, props: DeleteProps) {
const sql = `DELETE FROM ${table} ${this.getWhereStr(props.where)}`;
const pg = await connectPg();
return pg.query(sql);
}
async update(table: string, props: UpdateProps) {
if (props.values.length === 0) {
return {
rowCount: 0
};
}
const sql = `UPDATE ${table} SET ${this.getUpdateValStr(props.values)} ${this.getWhereStr(
props.where
)}`;
const pg = await connectPg();
return pg.query(sql);
}
async insert(table: string, props: InsertProps) {
if (props.values.length === 0) {
return {
rowCount: 0
};
}
const fields = props.values[0].map((item) => item.key).join(',');
const sql = `INSERT INTO ${table} (${fields}) VALUES ${this.getInsertValStr(props.values)} `;
const pg = await connectPg();
return pg.query(sql);
}
async query<T extends QueryResultRow = any>(sql: string) {
const pg = await connectPg();
return pg.query<T>(sql);
}
}
export const PgClient = new Pg();
/**
* Insert data items into a knowledge base
*/
export const insertKbItem = ({
userId,
kbId,
data
}: {
userId: string;
kbId: string;
data: {
vector: number[];
q: string;
a: string;
source?: string;
}[];
}) => {
return PgClient.insert('modelData', {
values: data.map((item) => [
{ key: 'user_id', value: userId },
{ key: 'kb_id', value: kbId },
{ key: 'source', value: item.source?.slice(0, 30)?.trim() || '' },
{ key: 'q', value: item.q.replace(/'/g, '"') },
{ key: 'a', value: item.a.replace(/'/g, '"') },
{ key: 'vector', value: `[${item.vector}]` }
])
});
};
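A short usage sketch of PgClient against the same modelData table used by insertKbItem; the selected columns and filter values are illustrative.
import { PgClient } from '@/service/pg';
export async function listKbItems(userId: string, kbId: string) {
const res = await PgClient.select('modelData', {
fields: ['q', 'a', 'source'],
where: [['user_id', userId], 'AND', ['kb_id', kbId]],
limit: 10,
offset: 0
});
return res.rows;
}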

View File

@@ -0,0 +1,34 @@
import axios from 'axios';
/* Bing search */
const BingSearch = async (wait_val: string) => {
// 'newbing中转服务器' is a placeholder for a NewBing relay-server endpoint
const response = await axios.post('newbing中转服务器', {
prompt: wait_val
});
const result = response.data.result;
return result;
};
/* Google search */
const GoogleSearch = async (wait_val: string) => {
const response = await axios.get('https://www.googleapis.com/customsearch/v1', {
params: {
key: process.env.GOOGLE_KEY,
q: wait_val,
cx: process.env.searchEngineId,
start: 1,
num: 3,
dateRestrict: 'm[1]' // restrict results to the past month
}
});
const results = response.data.items;
if (results !== null) {
const result = results.map((item: { snippet: string }) => item.snippet).join('\n');
return result;
}
};
export { BingSearch, GoogleSearch };

View File

@@ -0,0 +1,63 @@
import { NextApiResponse } from 'next';
import {
openaiError,
openaiAccountError,
proxyError,
ERROR_RESPONSE,
ERROR_ENUM
} from './errorCode';
import { clearCookie } from './utils/tools';
export interface ResponseType<T = any> {
code: number;
message: string;
data: T;
}
export const jsonRes = <T = any>(
res: NextApiResponse,
props?: {
code?: number;
message?: string;
data?: T;
error?: any;
}
) => {
const { code = 200, message = '', data = null, error } = props || {};
const errResponseKey = typeof error === 'string' ? error : error?.message;
// Specified error
if (ERROR_RESPONSE[errResponseKey]) {
// login is expired
if (errResponseKey === ERROR_ENUM.unAuthorization) {
clearCookie(res);
}
return res.json(ERROR_RESPONSE[errResponseKey]);
}
// another error
let msg = message || error?.message;
if ((code < 200 || code >= 400) && !message) {
msg = error?.message || '请求错误';
if (typeof error === 'string') {
msg = error;
} else if (proxyError[error?.code]) {
msg = '接口连接异常';
} else if (error?.response?.data?.error?.message) {
msg = error?.response?.data?.error?.message;
} else if (openaiAccountError[error?.response?.data?.error?.code]) {
msg = openaiAccountError[error?.response?.data?.error?.code];
} else if (openaiError[error?.response?.statusText]) {
msg = openaiError[error.response.statusText];
}
console.log(error);
}
res.json({
code,
statusText: '',
message: msg,
data: data !== undefined ? data : null
});
};
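A minimal sketch of an API route using jsonRes; the route logic is hypothetical and the module path is assumed.
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
// ...route logic...
jsonRes(res, { data: { ok: true } });
} catch (err) {
// ERROR_ENUM keys resolve to their mapped payload; everything else hits the generic branch
jsonRes(res, { code: 500, error: err });
}
}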

View File

@@ -0,0 +1,352 @@
import type { NextApiRequest } from 'next';
import jwt from 'jsonwebtoken';
import Cookie from 'cookie';
import { Chat, Model, OpenApi, User, ShareChat, KB } from '../mongo';
import type { ModelSchema } from '@/types/mongoSchema';
import type { ChatItemSimpleType } from '@/types/chat';
import mongoose from 'mongoose';
import { ClaudeEnum, defaultModel, embeddingModel, EmbeddingModelType } from '@/constants/model';
import { formatPrice } from '@/utils/user';
import { ERROR_ENUM } from '../errorCode';
import { ChatModelType, OpenAiChatEnum } from '@/constants/model';
import { hashPassword } from '@/service/utils/tools';
export type ApiKeyType = 'training' | 'chat';
export const parseCookie = (cookie?: string): Promise<string> => {
return new Promise((resolve, reject) => {
// parse the cookie
const cookies = Cookie.parse(cookie || '');
const token = cookies.token;
if (!token) {
return reject(ERROR_ENUM.unAuthorization);
}
const key = process.env.TOKEN_KEY as string;
jwt.verify(token, key, function (err, decoded: any) {
if (err || !decoded?.userId) {
reject(ERROR_ENUM.unAuthorization);
return;
}
resolve(decoded.userId);
});
});
};
/* uniform auth user */
export const authUser = async ({
req,
authToken = false,
authOpenApi = false,
authRoot = false,
authBalance = false
}: {
req: NextApiRequest;
authToken?: boolean;
authOpenApi?: boolean;
authRoot?: boolean;
authBalance?: boolean;
}) => {
const parseOpenApiKey = async (apiKey?: string) => {
if (!apiKey) {
return Promise.reject(ERROR_ENUM.unAuthorization);
}
try {
const openApi = await OpenApi.findOne({ apiKey });
if (!openApi) {
return Promise.reject(ERROR_ENUM.unAuthorization);
}
const userId = String(openApi.userId);
// update the last-used time
await OpenApi.findByIdAndUpdate(openApi._id, {
lastUsedTime: new Date()
});
return userId;
} catch (error) {
return Promise.reject(error);
}
};
const parseRootKey = async (rootKey?: string, userId = '') => {
if (!rootKey || !process.env.ROOT_KEY || rootKey !== process.env.ROOT_KEY) {
return Promise.reject(ERROR_ENUM.unAuthorization);
}
return userId;
};
const { cookie, apikey, rootkey, userid } = (req.headers || {}) as {
cookie?: string;
apikey?: string;
rootkey?: string;
userid?: string;
};
let uid = '';
if (authToken) {
uid = await parseCookie(cookie);
} else if (authOpenApi) {
uid = await parseOpenApiKey(apikey);
} else if (authRoot) {
uid = await parseRootKey(rootkey, userid);
} else if (cookie) {
uid = await parseCookie(cookie);
} else if (apikey) {
uid = await parseOpenApiKey(apikey);
} else if (rootkey) {
uid = await parseRootKey(rootkey, userid);
} else {
return Promise.reject(ERROR_ENUM.unAuthorization);
}
if (authBalance) {
const user = await User.findById(uid);
if (!user) {
return Promise.reject(ERROR_ENUM.unAuthorization);
}
if (!user.openaiKey && formatPrice(user.balance) <= 0) {
return Promise.reject(ERROR_ENUM.insufficientQuota);
}
}
return {
userId: uid
};
};
/* random get openai api key */
export const getSystemOpenAiKey = (type: ApiKeyType) => {
const keys = (() => {
if (type === 'training') {
return process.env.OPENAI_TRAINING_KEY?.split(',') || [];
}
return process.env.OPENAIKEY?.split(',') || [];
})();
// the env values are plain comma-separated strings
const i = Math.floor(Math.random() * keys.length);
return keys[i] || (process.env.OPENAIKEY as string);
};
/* Get the API key for a request */
export const getApiKey = async ({
model,
userId,
mustPay = false,
type = 'chat'
}: {
model: ChatModelType;
userId: string;
mustPay?: boolean;
type?: ApiKeyType;
}) => {
const user = await User.findById(userId);
if (!user) {
return Promise.reject(ERROR_ENUM.unAuthorization);
}
const keyMap = {
[OpenAiChatEnum.GPT35]: {
userOpenAiKey: user.openaiKey || '',
systemAuthKey: getSystemOpenAiKey(type) as string
},
[OpenAiChatEnum.GPT4]: {
userOpenAiKey: user.openaiKey || '',
systemAuthKey: process.env.GPT4KEY as string
},
[OpenAiChatEnum.GPT432k]: {
userOpenAiKey: user.openaiKey || '',
systemAuthKey: process.env.GPT4KEY as string
},
[ClaudeEnum.Claude]: {
userOpenAiKey: '',
systemAuthKey: process.env.CLAUDE_KEY as string
}
};
// the user has their own key
if (!mustPay && keyMap[model].userOpenAiKey) {
return {
user,
userOpenAiKey: keyMap[model].userOpenAiKey,
systemAuthKey: ''
};
}
// check the platform account balance
if (formatPrice(user.balance) <= 0) {
return Promise.reject(ERROR_ENUM.insufficientQuota);
}
return {
user,
userOpenAiKey: '',
systemAuthKey: keyMap[model].systemAuthKey
};
};
// model access authorization
export const authModel = async ({
modelId,
userId,
authUser = true,
authOwner = true,
reserveDetail = false
}: {
modelId: string;
userId: string;
authUser?: boolean;
authOwner?: boolean;
reserveDetail?: boolean; // focus reserve detail
}) => {
// fetch the model
const model = await Model.findById<ModelSchema>(modelId);
if (!model) {
return Promise.reject('模型不存在');
}
/*
Access verification
1. authOwner=true or authUser = true , just owner can use
2. authUser = false and share, anyone can use
*/
if (authOwner || (authUser && !model.share.isShare)) {
if (userId !== String(model.userId)) return Promise.reject(ERROR_ENUM.unAuthModel);
}
// do not share detail info
if (!reserveDetail && !model.share.isShareDetail && userId !== String(model.userId)) {
model.chat = {
...defaultModel.chat,
chatModel: model.chat.chatModel
};
}
return {
model,
showModelDetail: model.share.isShareDetail || userId === String(model.userId)
};
};
// knowledge base access authorization
export const authKb = async ({ kbId, userId }: { kbId: string; userId: string }) => {
const kb = await KB.findOne({
_id: kbId,
userId
});
if (kb) {
return kb;
}
return Promise.reject(ERROR_ENUM.unAuthKb);
};
// chat access authorization
export const authChat = async ({
modelId,
chatId,
req
}: {
modelId: string;
chatId?: string;
req: NextApiRequest;
}) => {
const { userId } = await authUser({ req, authToken: true });
// fetch the model
const { model, showModelDetail } = await authModel({
modelId,
userId,
authOwner: false,
reserveDetail: true
});
// chat history
let content: ChatItemSimpleType[] = [];
if (chatId) {
// fetch chat records
content = await Chat.aggregate([
{ $match: { _id: new mongoose.Types.ObjectId(chatId) } },
{
$project: {
content: {
$slice: ['$content', -50] // return the last 50 elements of the content array
}
}
},
{ $unwind: '$content' },
{
$project: {
obj: '$content.obj',
value: '$content.value',
quote: '$content.quote'
}
}
]);
}
// get the user's API key
const { userOpenAiKey, systemAuthKey } = await getApiKey({
model: model.chat.chatModel,
userId
});
return {
userOpenAiKey,
systemAuthKey,
content,
userId,
model,
showModelDetail
};
};
export const authShareChat = async ({
shareId,
password
}: {
shareId: string;
password: string;
}) => {
// get shareChat
const shareChat = await ShareChat.findById(shareId);
if (!shareChat) {
return Promise.reject('分享链接已失效');
}
if (shareChat.password !== hashPassword(password)) {
return Promise.reject({
code: 501,
message: '密码不正确'
});
}
const modelId = String(shareChat.modelId);
const userId = String(shareChat.userId);
// fetch the model
const { model, showModelDetail } = await authModel({
modelId,
userId,
authOwner: false,
reserveDetail: true
});
// get the user's API key
const { userOpenAiKey, systemAuthKey } = await getApiKey({
model: model.chat.chatModel,
userId
});
return {
userOpenAiKey,
systemAuthKey,
userId,
model,
showModelDetail
};
};
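A sketch of how authUser is typically called from an API route; the route body is hypothetical and the module paths are assumed from this commit's layout.
import type { NextApiRequest, NextApiResponse } from 'next';
import { authUser } from '@/service/utils/auth';
import { jsonRes } from '@/service/response';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
// only a valid login cookie is accepted; otherwise it rejects with ERROR_ENUM.unAuthorization
const { userId } = await authUser({ req, authToken: true });
jsonRes(res, { data: { userId } });
} catch (err) {
jsonRes(res, { code: 401, error: err });
}
}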

View File

@@ -0,0 +1,77 @@
import { ChatCompletionType, StreamResponseType } from './index';
import { ChatRoleEnum } from '@/constants/chat';
import axios from 'axios';
/* Model chat completion */
export const claudChat = async ({ apiKey, messages, stream, chatId }: ChatCompletionType) => {
// get system prompt
const systemPrompt = messages
.filter((item) => item.obj === 'System')
.map((item) => item.value)
.join('\n');
const systemPromptText = systemPrompt ? `你本次知识:${systemPrompt}\n下面是我的问题:` : '';
const prompt = `${systemPromptText}'${messages[messages.length - 1].value}'`;
const response = await axios.post(
process.env.CLAUDE_BASE_URL || '',
{
prompt,
stream,
conversationId: chatId
},
{
headers: {
Authorization: apiKey
},
timeout: stream ? 60000 : 240000,
responseType: stream ? 'stream' : 'json'
}
);
const responseText = stream ? '' : response.data?.text || '';
return {
streamResponse: response,
responseMessages: messages.concat({
obj: ChatRoleEnum.AI,
value: responseText
}),
responseText,
totalTokens: 0
};
};
/* claude stream response */
export const claudStreamResponse = async ({ res, chatResponse, prompts }: StreamResponseType) => {
try {
let responseContent = '';
try {
const decoder = new TextDecoder();
for await (const chunk of chatResponse.data as any) {
if (res.closed) {
break;
}
const content = decoder.decode(chunk);
responseContent += content;
content && res.write(content);
}
} catch (error) {
console.log('pipe error', error);
}
const finishMessages = prompts.concat({
obj: ChatRoleEnum.AI,
value: responseContent
});
return {
responseContent,
totalTokens: 0,
finishMessages
};
} catch (error) {
return Promise.reject(error);
}
};

View File

@@ -0,0 +1,167 @@
import { ChatItemSimpleType } from '@/types/chat';
import { modelToolMap } from '@/utils/plugin';
import type { ChatModelType } from '@/constants/model';
import { ChatRoleEnum } from '@/constants/chat';
import { OpenAiChatEnum, ClaudeEnum } from '@/constants/model';
import { chatResponse, openAiStreamResponse } from './openai';
import { claudChat, claudStreamResponse } from './claude';
import type { NextApiResponse } from 'next';
export type ChatCompletionType = {
apiKey: string;
temperature: number;
messages: ChatItemSimpleType[];
chatId?: string;
[key: string]: any;
};
export type ChatCompletionResponseType = {
streamResponse: any;
responseMessages: ChatItemSimpleType[];
responseText: string;
totalTokens: number;
};
export type StreamResponseType = {
chatResponse: any;
prompts: ChatItemSimpleType[];
res: NextApiResponse;
[key: string]: any;
};
export type StreamResponseReturnType = {
responseContent: string;
totalTokens: number;
finishMessages: ChatItemSimpleType[];
};
export const modelServiceToolMap: Record<
ChatModelType,
{
chatCompletion: (data: ChatCompletionType) => Promise<ChatCompletionResponseType>;
streamResponse: (data: StreamResponseType) => Promise<StreamResponseReturnType>;
}
> = {
[OpenAiChatEnum.GPT35]: {
chatCompletion: (data: ChatCompletionType) =>
chatResponse({ model: OpenAiChatEnum.GPT35, ...data }),
streamResponse: (data: StreamResponseType) =>
openAiStreamResponse({
model: OpenAiChatEnum.GPT35,
...data
})
},
[OpenAiChatEnum.GPT4]: {
chatCompletion: (data: ChatCompletionType) =>
chatResponse({ model: OpenAiChatEnum.GPT4, ...data }),
streamResponse: (data: StreamResponseType) =>
openAiStreamResponse({
model: OpenAiChatEnum.GPT4,
...data
})
},
[OpenAiChatEnum.GPT432k]: {
chatCompletion: (data: ChatCompletionType) =>
chatResponse({ model: OpenAiChatEnum.GPT432k, ...data }),
streamResponse: (data: StreamResponseType) =>
openAiStreamResponse({
model: OpenAiChatEnum.GPT432k,
...data
})
},
[ClaudeEnum.Claude]: {
chatCompletion: claudChat,
streamResponse: claudStreamResponse
}
};
/* delete invalid symbol */
const simplifyStr = (str: string) =>
str
.replace(/\n+/g, '\n') // collapse consecutive blank lines
.replace(/[^\S\r\n]+/g, ' ') // collapse runs of whitespace
.trim();
/* Truncate the chat context by token count */
export const ChatContextFilter = ({
model,
prompts,
maxTokens
}: {
model: ChatModelType;
prompts: ChatItemSimpleType[];
maxTokens: number;
}) => {
const systemPrompts: ChatItemSimpleType[] = [];
const chatPrompts: ChatItemSimpleType[] = [];
let rawTextLen = 0;
prompts.forEach((item) => {
const val = simplifyStr(item.value);
rawTextLen += val.length;
const data = {
obj: item.obj,
value: val
};
if (item.obj === ChatRoleEnum.System) {
systemPrompts.push(data);
} else {
chatPrompts.push(data);
}
});
// content is short enough; no token truncation needed
if (rawTextLen < maxTokens * 0.5) {
return [...systemPrompts, ...chatPrompts];
}
// subtract the system prompts' tokens from the budget
maxTokens -= modelToolMap[model].countTokens({
messages: systemPrompts
});
// truncate content by tokens
const chats: ChatItemSimpleType[] = [];
// take messages from newest to oldest
for (let i = chatPrompts.length - 1; i >= 0; i--) {
chats.unshift(chatPrompts[i]);
const tokens = modelToolMap[model].countTokens({
messages: chats
});
/* total tokens exceed the limit; system prompts must be kept */
if (tokens >= maxTokens) {
chats.shift();
break;
}
}
return [...systemPrompts, ...chats];
};
/* stream response */
export const resStreamResponse = async ({
model,
res,
chatResponse,
prompts
}: StreamResponseType & {
model: ChatModelType;
}) => {
// set up the event-stream response
res.setHeader('Content-Type', 'text/event-stream;charset=utf-8');
res.setHeader('Access-Control-Allow-Origin', '*');
res.setHeader('X-Accel-Buffering', 'no');
res.setHeader('Cache-Control', 'no-cache, no-transform');
const { responseContent, totalTokens, finishMessages } = await modelServiceToolMap[
model
].streamResponse({
chatResponse,
prompts,
res
});
return { responseContent, totalTokens, finishMessages };
};
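For reference, a non-streaming usage sketch of modelServiceToolMap, mirroring how generateQA calls it; the API key and messages are placeholders and the import paths are assumed.
import { modelServiceToolMap } from '@/service/utils/chat';
import { OpenAiChatEnum } from '@/constants/model';
export async function demoCompletion(apiKey: string) {
const { responseText, totalTokens } = await modelServiceToolMap[
OpenAiChatEnum.GPT35
].chatCompletion({
apiKey,
temperature: 0.2,
messages: [{ obj: 'Human', value: 'Hello!' }],
stream: false
});
return { responseText, totalTokens };
}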

View File

@@ -0,0 +1,120 @@
import { Configuration, OpenAIApi } from 'openai';
import { createParser, ParsedEvent, ReconnectInterval } from 'eventsource-parser';
import { axiosConfig } from '../tools';
import { ChatModelMap, OpenAiChatEnum } from '@/constants/model';
import { adaptChatItem_openAI } from '@/utils/plugin/openai';
import { modelToolMap } from '@/utils/plugin';
import { ChatCompletionType, ChatContextFilter, StreamResponseType } from './index';
import { ChatRoleEnum } from '@/constants/chat';
export const getOpenAIApi = () =>
new OpenAIApi(
new Configuration({
basePath: process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1'
})
);
/* Model chat completion */
export const chatResponse = async ({
model,
apiKey,
temperature,
messages,
stream
}: ChatCompletionType & { model: `${OpenAiChatEnum}` }) => {
const filterMessages = ChatContextFilter({
model,
prompts: messages,
maxTokens: Math.ceil(ChatModelMap[model].contextMaxToken * 0.85)
});
const adaptMessages = adaptChatItem_openAI({ messages: filterMessages });
const chatAPI = getOpenAIApi();
const response = await chatAPI.createChatCompletion(
{
model,
temperature: Number(temperature) || 0,
messages: adaptMessages,
frequency_penalty: 0.5, // higher values reduce repetition
presence_penalty: -0.5, // higher values encourage new topics
stream,
stop: ['.!?。']
},
{
timeout: stream ? 60000 : 240000,
responseType: stream ? 'stream' : 'json',
...axiosConfig(apiKey)
}
);
const responseText = stream ? '' : response.data.choices[0].message?.content || '';
const totalTokens = stream ? 0 : response.data.usage?.total_tokens || 0;
return {
streamResponse: response,
responseMessages: filterMessages.concat({ obj: 'AI', value: responseText }),
responseText,
totalTokens
};
};
/* openai stream response */
export const openAiStreamResponse = async ({
res,
model,
chatResponse,
prompts
}: StreamResponseType & {
model: `${OpenAiChatEnum}`;
}) => {
try {
let responseContent = '';
const onParse = async (event: ParsedEvent | ReconnectInterval) => {
if (event.type !== 'event') return;
const data = event.data;
if (data === '[DONE]') return;
try {
const json = JSON.parse(data);
const content: string = json?.choices?.[0].delta.content || '';
responseContent += content;
!res.closed && content && res.write(content);
} catch (error) {
// ignore malformed SSE chunks
}
};
try {
const decoder = new TextDecoder();
const parser = createParser(onParse);
for await (const chunk of chatResponse.data as any) {
if (res.closed) {
break;
}
parser.feed(decoder.decode(chunk, { stream: true }));
}
} catch (error) {
console.log('pipe error', error);
}
// count tokens
const finishMessages = prompts.concat({
obj: ChatRoleEnum.AI,
value: responseContent
});
const totalTokens = modelToolMap[model].countTokens({
messages: finishMessages
});
return {
responseContent,
totalTokens,
finishMessages
};
} catch (error) {
return Promise.reject(error);
}
};

View File

@@ -0,0 +1,36 @@
import { promotionRecord } from '../mongo';
export const pushPromotionRecord = async ({
userId,
objUId,
type,
amount
}: {
userId: string;
objUId: string;
type: 'invite' | 'shareModel';
amount: number;
}) => {
try {
await promotionRecord.create({
userId,
objUId,
type,
amount
});
} catch (error) {
console.log('创建推广记录异常', error);
}
};
export const withdrawRecord = async ({ userId, amount }: { userId: string; amount: number }) => {
try {
await promotionRecord.create({
userId,
type: 'withdraw',
amount
});
} catch (error) {
console.log('提现记录异常', error);
}
};

View File

@@ -0,0 +1,71 @@
import * as nodemailer from 'nodemailer';
import { UserAuthTypeEnum } from '@/constants/common';
import Dysmsapi, * as dysmsapi from '@alicloud/dysmsapi20170525';
// @ts-ignore
import * as OpenApi from '@alicloud/openapi-client';
// @ts-ignore
import * as Util from '@alicloud/tea-util';
const myEmail = process.env.MY_MAIL;
const mailTransport = nodemailer.createTransport({
// host: 'smtp.qq.phone',
service: 'qq',
secure: true, // send over TLS (recommended)
auth: {
user: myEmail,
pass: process.env.MAILE_CODE
}
});
const emailMap: { [key: string]: any } = {
[UserAuthTypeEnum.register]: {
subject: '注册 FastGPT 账号',
html: (code: string) => `<div>您正在注册 FastGPT 账号,验证码为:${code}</div>`
},
[UserAuthTypeEnum.findPassword]: {
subject: '修改 FastGPT 密码',
html: (code: string) => `<div>您正在修改 FastGPT 账号密码,验证码为:${code}</div>`
}
};
export const sendEmailCode = (email: string, code: string, type: `${UserAuthTypeEnum}`) => {
return new Promise((resolve, reject) => {
const options = {
from: `"FastGPT" ${myEmail}`,
to: email,
subject: emailMap[type]?.subject,
html: emailMap[type]?.html(code)
};
mailTransport.sendMail(options, function (err, msg) {
if (err) {
console.log('send email error->', err);
reject('发生邮件异常');
} else {
resolve('');
}
});
});
};
export const sendPhoneCode = async (phone: string, code: string) => {
const accessKeyId = process.env.aliAccessKeyId;
const accessKeySecret = process.env.aliAccessKeySecret;
const signName = process.env.aliSignName;
const templateCode = process.env.aliTemplateCode;
const endpoint = 'dysmsapi.aliyuncs.com';
const sendSmsRequest = new dysmsapi.SendSmsRequest({
phoneNumbers: phone,
signName,
templateCode,
templateParam: `{"code":${code}}`
});
const config = new OpenApi.Config({ accessKeyId, accessKeySecret, endpoint });
const client = new Dysmsapi(config);
const runtime = new Util.RuntimeOptions({});
const res = await client.sendSmsWithOptions(sendSmsRequest, runtime);
if (res.body.code !== 'OK') {
return Promise.reject(res.body.message || '发送短信失败');
}
};

View File

@@ -0,0 +1,72 @@
import type { NextApiResponse, NextApiHandler, NextApiRequest } from 'next';
import NextCors from 'nextjs-cors';
import crypto from 'crypto';
import jwt from 'jsonwebtoken';
import { generateQA } from '../events/generateQA';
import { generateVector } from '../events/generateVector';
/* Hash a password */
export const hashPassword = (psw: string) => {
return crypto.createHash('sha256').update(psw).digest('hex');
};
/* Generate a JWT */
export const generateToken = (userId: string) => {
const key = process.env.TOKEN_KEY as string;
const token = jwt.sign(
{
userId,
exp: Math.floor(Date.now() / 1000) + 60 * 60 * 24 * 7
},
key
);
return token;
};
/* set cookie */
export const setCookie = (res: NextApiResponse, userId: string) => {
res.setHeader('Set-Cookie', `token=${generateToken(userId)}; Path=/; HttpOnly; Max-Age=604800`);
};
/* clear cookie */
export const clearCookie = (res: NextApiResponse) => {
res.setHeader('Set-Cookie', 'token=; Path=/; Max-Age=0');
};
/* openai axios config */
export const axiosConfig = (apikey: string) => ({
baseURL: process.env.OPENAI_BASE_URL || 'https://api.openai.com/v1',
httpsAgent: global.httpsAgent,
headers: {
Authorization: `Bearer ${apikey}`,
auth: process.env.OPENAI_BASE_URL_AUTH || ''
}
});
export function withNextCors(handler: NextApiHandler): NextApiHandler {
return async function nextApiHandlerWrappedWithNextCors(
req: NextApiRequest,
res: NextApiResponse
) {
const methods = ['GET', 'HEAD', 'PUT', 'PATCH', 'POST', 'DELETE'];
const origin = req.headers.origin;
await NextCors(req, res, {
methods,
origin: origin,
optionsSuccessStatus: 200
});
return handler(req, res);
};
}
export const startQueue = () => {
const qaMax = Number(process.env.QA_MAX_PROCESS || 10);
const vectorMax = Number(process.env.VECTOR_MAX_PROCESS || 10);
for (let i = 0; i < qaMax; i++) {
generateQA();
}
for (let i = 0; i < vectorMax; i++) {
generateVector();
}
};
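Finally, a sketch of composing these helpers in an API route; the route itself is hypothetical and the response-helper path is assumed.
import type { NextApiRequest, NextApiResponse } from 'next';
import { withNextCors, hashPassword } from '@/service/utils/tools';
import { jsonRes } from '@/service/response';
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse) {
// hypothetical route: echo back the hash of a posted password
const { psw = '' } = req.body as { psw?: string };
jsonRes(res, { data: { hash: hashPassword(psw) } });
});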

View File

@@ -0,0 +1,31 @@
// @ts-ignore
import Payment from 'wxpay-v3';
export const getPayment = () => {
return new Payment({
appid: process.env.WX_APPID,
mchid: process.env.WX_MCHID,
private_key: process.env.WX_PRIVATE_KEY?.replace(/\\n/g, '\n'),
serial_no: process.env.WX_SERIAL_NO,
apiv3_private_key: process.env.WX_V3_CODE,
notify_url: process.env.WX_NOTIFY_URL
});
};
export const nativePay = (amount: number, payId: string): Promise<string> =>
getPayment()
.native({
description: 'Fast GPT 余额充值',
out_trade_no: payId,
amount: {
total: amount
}
})
.then((res: any) => JSON.parse(res.data).code_url);
export const getPayResult = (payId: string) =>
getPayment()
.getTransactionsByOutTradeNo({
out_trade_no: payId
})
.then((res: any) => JSON.parse(res.data));