Mirror of https://github.com/labring/FastGPT.git (synced 2025-07-27 16:33:49 +00:00)

feat: admin set env
@@ -201,7 +201,7 @@ export async function appKbSearch({
     searchPrompts: [
       {
         obj: ChatRoleEnum.System,
-        value: `知识库:${systemPrompt}`
+        value: `知识库:<${systemPrompt}>`
       },
       guidePrompt
     ]
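The only functional change in this hunk is that the retrieved knowledge-base text is now wrapped in angle brackets inside the system prompt. A minimal sketch of the resulting message object, assuming illustrative ChatRoleEnum values and sample content (neither is taken from this diff):

// Illustrative sketch; only the `知识库:<...>` template mirrors the hunk above.
enum ChatRoleEnum {
  System = 'System',
  Human = 'Human',
  AI = 'AI'
}

const systemPrompt = 'FastGPT is an open-source LLM application platform.'; // sample retrieved content

const searchPrompt = {
  obj: ChatRoleEnum.System,
  // The <...> delimiters separate the injected knowledge-base content from the
  // fixed prefix, so the model can tell where the quoted material ends.
  value: `知识库:<${systemPrompt}>`
};

console.log(searchPrompt.value); // 知识库:<FastGPT is an open-source LLM application platform.>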
@@ -8,7 +8,7 @@ import { axiosConfig } from '@/service/utils/tools';

 export default async function handler(req: NextApiRequest, res: NextApiResponse) {
   try {
-    if (process.env.SENSITIVE_CHECK !== '1') {
+    if (global.systemEnv.sensitiveCheck) {
       return jsonRes(res);
     }
client/src/pages/api/system/updateEnv.ts (new file, 32 lines)
@@ -0,0 +1,32 @@
+import type { NextApiRequest, NextApiResponse } from 'next';
+import { jsonRes } from '@/service/response';
+import { System } from '@/service/models/system';
+import { authUser } from '@/service/utils/auth';
+
+export type InitDateResponse = {
+  beianText: string;
+  googleVerKey: string;
+};
+
+export default async function handler(req: NextApiRequest, res: NextApiResponse) {
+  await authUser({ req, authRoot: true });
+  updateSystemEnv();
+  jsonRes<InitDateResponse>(res);
+}
+
+export async function updateSystemEnv() {
+  try {
+    const mongoData = await System.findOne();
+
+    if (mongoData) {
+      const obj = mongoData.toObject();
+      global.systemEnv = {
+        ...global.systemEnv,
+        ...obj
+      };
+    }
+    console.log('update env', global.systemEnv);
+  } catch (error) {
+    console.log('update system env error');
+  }
+}
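The new endpoint simply reloads global.systemEnv from MongoDB and returns an empty success envelope; note that updateSystemEnv() is called without await, so the response may come back before the reload finishes. A hypothetical admin-side call, assuming the route path implied by the file location and an Authorization header format that this diff does not actually specify:

// Hypothetical client call; the header format is an assumption, since the
// exact transport expected by authUser({ authRoot: true }) is not shown here.
async function reloadSystemEnv(baseUrl: string, rootToken: string): Promise<void> {
  const res = await fetch(`${baseUrl}/api/system/updateEnv`, {
    headers: { Authorization: rootToken }
  });
  if (!res.ok) {
    throw new Error(`updateEnv failed with status ${res.status}`);
  }
}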
@@ -16,9 +16,7 @@ const reduceQueue = () => {
 };

 export async function generateQA(): Promise<any> {
-  const maxProcess = Number(process.env.QA_MAX_PROCESS || 10);
-
-  if (global.qaQueueLen >= maxProcess) return;
+  if (global.qaQueueLen >= global.systemEnv.qaMaxProcess) return;
   global.qaQueueLen++;

   let trainingId = '';
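Both background workers now read their concurrency cap from global.systemEnv on every invocation instead of snapshotting an environment variable at module load, so an admin change takes effect the next time a worker is scheduled. A self-contained sketch of that cap check, with stand-ins for the globals and an illustrative task runner:

// Stand-ins for global.systemEnv and global.qaQueueLen from the diff.
const systemEnv = { qaMaxProcess: 10 };
let qaQueueLen = 0;

async function generateQASketch(runTask: () => Promise<void>): Promise<void> {
  // The cap is re-read on every call, so runtime config changes apply immediately.
  if (qaQueueLen >= systemEnv.qaMaxProcess) return;
  qaQueueLen++;
  try {
    await runTask();
  } finally {
    qaQueueLen--; // the real code does this via reduceQueue()
  }
}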
@@ -12,9 +12,7 @@ const reduceQueue = () => {

 /* 索引生成队列。每导入一次,就是一个单独的线程 */
 export async function generateVector(): Promise<any> {
-  const maxProcess = Number(process.env.VECTOR_MAX_PROCESS || 10);
-
-  if (global.vectorQueueLen >= maxProcess) return;
+  if (global.vectorQueueLen >= global.systemEnv.vectorMaxProcess) return;
   global.vectorQueueLen++;

   let trainingId = '';
client/src/service/models/system.ts (new file, 30 lines)
@@ -0,0 +1,30 @@
+import { Schema, model, models } from 'mongoose';
+
+const SystemSchema = new Schema({
+  openAIKeys: {
+    type: String,
+    default: ''
+  },
+  openAITrainingKeys: {
+    type: String,
+    default: ''
+  },
+  gpt4Key: {
+    type: String,
+    default: ''
+  },
+  vectorMaxProcess: {
+    type: Number,
+    default: 10
+  },
+  qaMaxProcess: {
+    type: Number,
+    default: 10
+  },
+  sensitiveCheck: {
+    type: Boolean,
+    default: false
+  }
+});
+
+export const System = models['system'] || model('system', SystemSchema);
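updateSystemEnv() only reads the first document in the system collection, so configuration lives in a single document that an admin edits in MongoDB and then reloads through the endpoint above. A hypothetical one-off seeding script, not part of this commit; MONGODB_URI is an assumed variable name:

// Hypothetical seeding script: writes the single config document that
// updateSystemEnv() later merges into global.systemEnv.
import mongoose from 'mongoose';
import { System } from '@/service/models/system';

async function seedSystemEnv(): Promise<void> {
  await mongoose.connect(process.env.MONGODB_URI as string); // assumed env var name

  const current = await System.findOne();
  if (current) {
    current.set({ qaMaxProcess: 15, sensitiveCheck: true });
    await current.save();
  } else {
    await System.create({
      openAIKeys: 'sk-aaa,sk-bbb', // comma-separated; one is picked at random per request
      vectorMaxProcess: 15,
      qaMaxProcess: 15,
      sensitiveCheck: true
    });
  }

  await mongoose.disconnect();
}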
@@ -1,7 +1,7 @@
 import mongoose from 'mongoose';
 import tunnel from 'tunnel';
 import { TrainingData } from './mongo';
 import { startQueue } from './utils/tools';
+import { updateSystemEnv } from '@/pages/api/system/updateEnv';

 /**
  * 连接 MongoDB 数据库
@@ -11,6 +11,27 @@ export async function connectToDatabase(): Promise<void> {
     return;
   }

+  // init global data
+  global.qaQueueLen = 0;
+  global.vectorQueueLen = 0;
+  global.systemEnv = {
+    openAIKeys: process.env.OPENAIKEY || '',
+    openAITrainingKeys: process.env.OPENAI_TRAINING_KEY || '',
+    gpt4Key: process.env.GPT4KEY || '',
+    vectorMaxProcess: 10,
+    qaMaxProcess: 10,
+    sensitiveCheck: false
+  };
+  // proxy obj
+  if (process.env.AXIOS_PROXY_HOST && process.env.AXIOS_PROXY_PORT) {
+    global.httpsAgent = tunnel.httpsOverHttp({
+      proxy: {
+        host: process.env.AXIOS_PROXY_HOST,
+        port: +process.env.AXIOS_PROXY_PORT
+      }
+    });
+  }
+
   global.mongodb = 'connecting';
   try {
     mongoose.set('strictQuery', true);
@@ -27,20 +48,8 @@ export async function connectToDatabase(): Promise<void> {
     global.mongodb = null;
   }

-  // 创建代理对象
-  if (process.env.AXIOS_PROXY_HOST && process.env.AXIOS_PROXY_PORT) {
-    global.httpsAgent = tunnel.httpsOverHttp({
-      proxy: {
-        host: process.env.AXIOS_PROXY_HOST,
-        port: +process.env.AXIOS_PROXY_PORT
-      }
-    });
-  }
-
-  // 初始化队列
-  global.qaQueueLen = 0;
-  global.vectorQueueLen = 0;
-
+  // init function
+  updateSystemEnv();
   startQueue();
 }
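Taken together, the connectToDatabase() changes give the boot sequence a clear layering: hard-coded defaults and env-var fallbacks are written into global.systemEnv up front, MongoDB is connected, then updateSystemEnv() overlays whatever the admin has stored in the system collection before startQueue() spins up the workers. A minimal standalone sketch of that layering (the merge helper is illustrative; the field names mirror the diff):

type SystemEnv = {
  openAIKeys: string;
  openAITrainingKeys: string;
  gpt4Key: string;
  vectorMaxProcess: number;
  qaMaxProcess: number;
  sensitiveCheck: boolean;
};

// 1. Defaults plus env-var fallbacks, as connectToDatabase() sets them up front.
const defaults: SystemEnv = {
  openAIKeys: process.env.OPENAIKEY || '',
  openAITrainingKeys: process.env.OPENAI_TRAINING_KEY || '',
  gpt4Key: process.env.GPT4KEY || '',
  vectorMaxProcess: 10,
  qaMaxProcess: 10,
  sensitiveCheck: false
};

// 2. Overlay from Mongo, as updateSystemEnv() does with System.findOne().
function applyDbOverrides(current: SystemEnv, fromDb: Partial<SystemEnv> | null): SystemEnv {
  return fromDb ? { ...current, ...fromDb } : current;
}

// Later keys win in the spread, so a saved document overrides both defaults and env vars.
const systemEnv = applyDbOverrides(defaults, { qaMaxProcess: 20 });
console.log(systemEnv.qaMaxProcess); // 20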
@@ -57,3 +66,4 @@ export * from './models/collection';
 export * from './models/shareChat';
 export * from './models/kb';
 export * from './models/inform';
+export * from './models/system';
@@ -6,14 +6,13 @@ export const connectPg = async () => {
     return global.pgClient;
   }

-  const maxLink = Number(process.env.VECTOR_MAX_PROCESS || 10);
   global.pgClient = new Pool({
     host: process.env.PG_HOST,
     port: process.env.PG_PORT ? +process.env.PG_PORT : 5432,
     user: process.env.PG_USER,
     password: process.env.PG_PASSWORD,
     database: process.env.PG_DB_NAME,
-    max: Math.floor(maxLink * 0.5),
+    max: 80,
     idleTimeoutMillis: 60000,
     connectionTimeoutMillis: 20000
   });
@@ -123,14 +123,21 @@ export const authUser = async ({
 export const getSystemOpenAiKey = (type: ApiKeyType) => {
   const keys = (() => {
     if (type === 'training') {
-      return process.env.OPENAI_TRAINING_KEY?.split(',') || [];
+      return global.systemEnv.openAITrainingKeys?.split(',') || [];
     }
-    return process.env.OPENAIKEY?.split(',') || [];
+    return global.systemEnv.openAIKeys?.split(',') || [];
   })();

   // 纯字符串类型
   const i = Math.floor(Math.random() * keys.length);
-  return keys[i] || (process.env.OPENAIKEY as string);
+  return keys[i] || (global.systemEnv.openAIKeys as string);
 };
+export const getGpt4Key = () => {
+  const keys = global.systemEnv.gpt4Key?.split(',') || [];
+
+  // 纯字符串类型
+  const i = Math.floor(Math.random() * keys.length);
+  return keys[i] || (global.systemEnv.openAIKeys as string);
+};

 /* 获取 api 请求的 key */
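getSystemOpenAiKey() and the new getGpt4Key() share one pattern: keys live in a single comma-separated string and one is picked at random per request, with a fallback if the pick comes back empty. A minimal generic sketch of that rotation (the function name and parameters are illustrative):

// Sketch of the comma-separated key rotation; fallback stands in for
// global.systemEnv.openAIKeys in the real helpers.
function pickKey(commaSeparated: string, fallback: string): string {
  const keys = commaSeparated.split(',').filter(Boolean);
  const i = Math.floor(Math.random() * keys.length);
  return keys[i] || fallback;
}

// pickKey('sk-a,sk-b,sk-c', 'sk-default') returns one of the three keys;
// pickKey('', 'sk-default') falls back to 'sk-default'.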
@@ -157,11 +164,11 @@ export const getApiKey = async ({
     },
     [OpenAiChatEnum.GPT4]: {
       userOpenAiKey: user.openaiKey || '',
-      systemAuthKey: process.env.GPT4KEY as string
+      systemAuthKey: getGpt4Key() as string
     },
     [OpenAiChatEnum.GPT432k]: {
       userOpenAiKey: user.openaiKey || '',
-      systemAuthKey: process.env.GPT4KEY as string
+      systemAuthKey: getGpt4Key() as string
     },
     [ClaudeEnum.Claude]: {
       userOpenAiKey: '',
@@ -60,13 +60,10 @@ export function withNextCors(handler: NextApiHandler): NextApiHandler {
 }

 export const startQueue = () => {
-  const qaMax = Number(process.env.QA_MAX_PROCESS || 10);
-  const vectorMax = Number(process.env.VECTOR_MAX_PROCESS || 10);
-
-  for (let i = 0; i < qaMax; i++) {
+  for (let i = 0; i < global.systemEnv.qaMaxProcess; i++) {
     generateQA();
   }
-  for (let i = 0; i < vectorMax; i++) {
+  for (let i = 0; i < global.systemEnv.vectorMaxProcess; i++) {
     generateVector();
   }
 };
client/src/types/index.d.ts (vendored, 26 lines)
@@ -3,6 +3,15 @@ import type { Agent } from 'http';
 import type { Pool } from 'pg';
 import type { Tiktoken } from '@dqbd/tiktoken';

+export type PagingData<T> = {
+  pageNum: number;
+  pageSize: number;
+  data: T[];
+  total?: number;
+};
+
+export type RequestPaging = { pageNum: number; pageSize: number; [key]: any };
+
 declare global {
   var mongodb: Mongoose | string | null;
   var pgClient: Pool | null;
@@ -13,17 +22,16 @@ declare global {
   var qaQueueLen: number;
   var vectorQueueLen: number;
   var OpenAiEncMap: Record<string, Tiktoken>;
+  var systemEnv: {
+    openAIKeys: string;
+    openAITrainingKeys: string;
+    gpt4Key: string;
+    vectorMaxProcess: number;
+    qaMaxProcess: number;
+    sensitiveCheck: boolean;
+  };

   interface Window {
     ['pdfjs-dist/build/pdf']: any;
   }
 }
-
-export type PagingData<T> = {
-  pageNum: number;
-  pageSize: number;
-  data: T[];
-  total?: number;
-};
-
-export type RequestPaging = { pageNum: number; pageSize: number; [key]: any };
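Because systemEnv is declared with var inside declare global, every file can read global.systemEnv with full typing and no import. A tiny self-contained illustration of the same ambient-declaration technique, using hypothetical names:

// types.d.ts (illustrative) — ambient declaration visible everywhere
declare global {
  var appConfig: { maxWorkers: number };
}
export {}; // keeps the file a module so the global augmentation applies

// consumer.ts (illustrative) — typed access with no import
global.appConfig = { maxWorkers: 10 };
const workers: number = global.appConfig.maxWorkers;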