Mirror of https://github.com/labring/FastGPT.git (synced 2025-07-23 05:12:39 +00:00)
V4.9.4 feature (#4470)
* Training status (#4424)
* dataset data training state (#4311)
* dataset data training state
* fix
* fix ts
* fix
* fix api format
* fix
* fix
* perf: count training
* format
* fix: dataset training state (#4417)
* fix
* add test
* fix
* fix
* fix test
* fix test
* perf: training count
* count
* loading status
---------
Co-authored-by: heheer <heheer@sealos.io>
* doc
* website sync feature (#4429)
* perf: introduce BullMQ for website sync (#4403)
* perf: introduce BullMQ for website sync
* feat: new redis module
* fix: remove graceful shutdown
* perf: improve UI in dataset detail
  - Updated the "change" icon SVG file.
  - Modified i18n strings.
  - Added new i18n string "immediate_sync".
  - Improved UI in dataset detail page, including button icons and background colors.
* refactor: Add chunkSettings to DatasetSchema
* perf: website sync ux
* env template
* fix: clean up website dataset when updating chunk settings (#4420)
* perf: check setting updated
* perf: worker concurrency
* feat: init script for website sync refactor (#4425)
* website feature doc
---------
Co-authored-by: a.e. <49438478+I-Info@users.noreply.github.com>
* pro migration (#4388) (#4433)
* pro migration
* reuse customPdfParseType
Co-authored-by: gggaaallleee <91131304+gggaaallleee@users.noreply.github.com>
* perf: remove loading ui
* feat: config chat file expired time
* Redis cache (#4436)
* perf: add Redis cache for vector counting (#4432)
* feat: cache
* perf: get cache key
---------
Co-authored-by: a.e. <49438478+I-Info@users.noreply.github.com>
* perf: mobile voice input (#4437)
* update: Mobile voice interaction (#4362)
* Add files via upload
* Add files via upload
* Update ollama.md
* Update ollama.md
* Add files via upload
* Update useSpeech.ts
* Update ChatInput.tsx
* Update useSpeech.ts
* Update ChatInput.tsx
* Update useSpeech.ts
* Update constants.ts
* Add files via upload
* Update ChatInput.tsx
* Update useSpeech.ts
* Update useSpeech.ts
* Update useSpeech.ts
* Update ChatInput.tsx
* Add files via upload
* Update common.json
* Update VoiceInput.tsx
* Update ChatInput.tsx
* Update VoiceInput.tsx
* Update useSpeech.ts
* Update useSpeech.ts
* Update common.json
* Update common.json
* Update common.json
* Update VoiceInput.tsx
* Update VoiceInput.tsx
* Update ChatInput.tsx
* Update VoiceInput.tsx
* Update ChatInput.tsx
* Update VoiceInput.tsx
* Update ChatInput.tsx
* Update useSpeech.ts
* Update common.json
* Update chat.json
* Update common.json
* Update chat.json
* Update common.json
* Update chat.json
* Update VoiceInput.tsx
* Update ChatInput.tsx
* Update useSpeech.ts
* Update VoiceInput.tsx
* speech ui
* Improve the voice input component: adjust the input box display logic, fix the voice input overlay style, update the canvas background opacity, and enhance user interaction. (#4435)
* perf: mobile voice input
---------
Co-authored-by: dreamer6680 <1468683855@qq.com>
* Test completion v2 (#4438)
* add v2 completions (#4364)
* add v2 completions
* completion config
* config version
* fix
* frontend
* doc
* fix
* fix: completions v2 api
---------
Co-authored-by: heheer <heheer@sealos.io>
* package
* Test mongo log (#4443)
* feat: mongodb-log (#4426)
* perf: mongo log
* feat: completions stop reasoner
* mongo db log
---------
Co-authored-by: Finley Ge <32237950+FinleyGe@users.noreply.github.com>
* update doc
* Update doc
* fix external var ui (#4444)
* action
* fix: ts (#4458)
* preview doc action
  add docs preview permission
  update preview action
  update action
* update doc (#4460)
* update preview action
* update doc
* remove
* update
* schema
* update mq export; perf: redis cache (#4465)
* perf: redis cache
* update mq export
* perf: website sync error tip
* add error worker
* website sync ui (#4466)
* Updated the dynamic display of the voice input pop-up (#4469)
* Update VoiceInput.tsx
* Update VoiceInput.tsx
* Update VoiceInput.tsx
* fix: voice input
---------
Co-authored-by: heheer <heheer@sealos.io>
Co-authored-by: a.e. <49438478+I-Info@users.noreply.github.com>
Co-authored-by: gggaaallleee <91131304+gggaaallleee@users.noreply.github.com>
Co-authored-by: dreamer6680 <1468683855@qq.com>
Co-authored-by: Finley Ge <32237950+FinleyGe@users.noreply.github.com>
@@ -1,5 +1,4 @@
-import { connectionMongo, getMongoModel } from '../../mongo';
-const { Schema } = connectionMongo;
+import { getMongoModel, Schema } from '../../mongo';
 import { RawTextBufferSchemaType } from './type';
 
 export const collectionName = 'buffer_rawtexts';
@@ -1,5 +1,4 @@
-import { connectionMongo, getMongoModel, type Model } from '../../../common/mongo';
-const { Schema, model, models } = connectionMongo;
+import { Schema, getMongoModel } from '../../../common/mongo';
 import { TTSBufferSchemaType } from './type.d';
 
 export const collectionName = 'buffer_tts';
packages/service/common/bullmq/index.ts (new file, 79 lines)
@@ -0,0 +1,79 @@
import { ConnectionOptions, Processor, Queue, QueueOptions, Worker, WorkerOptions } from 'bullmq';
import { addLog } from '../system/log';
import { newQueueRedisConnection, newWorkerRedisConnection } from '../redis';

const defaultWorkerOpts: Omit<ConnectionOptions, 'connection'> = {
  removeOnComplete: {
    count: 0 // Delete jobs immediately on completion
  },
  removeOnFail: {
    count: 0 // Delete jobs immediately on failure
  }
};

export enum QueueNames {
  websiteSync = 'websiteSync'
}

export const queues = (() => {
  if (!global.queues) {
    global.queues = new Map<QueueNames, Queue>();
  }
  return global.queues;
})();
export const workers = (() => {
  if (!global.workers) {
    global.workers = new Map<QueueNames, Worker>();
  }
  return global.workers;
})();

export function getQueue<DataType, ReturnType = void>(
  name: QueueNames,
  opts?: Omit<QueueOptions, 'connection'>
): Queue<DataType, ReturnType> {
  // check if global.queues has the queue
  const queue = queues.get(name);
  if (queue) {
    return queue as Queue<DataType, ReturnType>;
  }
  const newQueue = new Queue<DataType, ReturnType>(name.toString(), {
    connection: newQueueRedisConnection(),
    ...opts
  });

  // default error handler, to avoid unhandled exceptions
  newQueue.on('error', (error) => {
    addLog.error(`MQ Queue [${name}]: ${error.message}`, error);
  });
  queues.set(name, newQueue);
  return newQueue;
}

export function getWorker<DataType, ReturnType = void>(
  name: QueueNames,
  processor: Processor<DataType, ReturnType>,
  opts?: Omit<WorkerOptions, 'connection'>
): Worker<DataType, ReturnType> {
  const worker = workers.get(name);
  if (worker) {
    return worker as Worker<DataType, ReturnType>;
  }

  const newWorker = new Worker<DataType, ReturnType>(name.toString(), processor, {
    connection: newWorkerRedisConnection(),
    ...defaultWorkerOpts,
    ...opts
  });
  // default error handler, to avoid unhandled exceptions
  newWorker.on('error', (error) => {
    addLog.error(`MQ Worker [${name}]: ${error.message}`, error);
  });
  newWorker.on('failed', (jobId, error) => {
    addLog.error(`MQ Worker [${name}]: ${error.message}`, error);
  });
  workers.set(name, newWorker);
  return newWorker;
}

export * from 'bullmq';
packages/service/common/bullmq/type.d.ts (new file, 7 lines)
@@ -0,0 +1,7 @@
import { Queue, Worker } from 'bullmq';
import { QueueNames } from './index';

declare global {
  var queues: Map<QueueNames, Queue> | undefined;
  var workers: Map<QueueNames, Worker> | undefined;
}
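For orientation, here is a minimal usage sketch of the queue helpers above, wired up for the websiteSync queue. The payload type, processor body, and import path are illustrative assumptions and are not part of this commit:

import { getQueue, getWorker, QueueNames } from '../bullmq'; // hypothetical relative path

// Hypothetical job payload, for illustration only
type WebsiteSyncJobData = { datasetId: string };

// Queue and worker share the same QueueNames entry, so BullMQ routes jobs between them;
// both are cached on the global maps, so repeated calls return the same instances.
const websiteSyncQueue = getQueue<WebsiteSyncJobData>(QueueNames.websiteSync);

getWorker<WebsiteSyncJobData>(QueueNames.websiteSync, async (job) => {
  // ...sync the website dataset identified by job.data.datasetId
});

// With the default worker options above, completed and failed jobs are removed immediately.
export const addWebsiteSyncJob = (datasetId: string) =>
  websiteSyncQueue.add('sync', { datasetId });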
@@ -1,5 +1,4 @@
-import { connectionMongo, getMongoModel, type Model } from '../../mongo';
-const { Schema } = connectionMongo;
+import { Schema, getMongoModel } from '../../mongo';
 
 const DatasetFileSchema = new Schema({});
 const ChatFileSchema = new Schema({});
@@ -1,7 +1,6 @@
 import { TeamCollectionName } from '@fastgpt/global/support/user/team/constant';
-import { connectionMongo, getMongoModel } from '../../mongo';
+import { Schema, getMongoModel } from '../../mongo';
 import { MongoImageSchemaType } from '@fastgpt/global/common/file/image/type.d';
-const { Schema } = connectionMongo;
 
 const ImageSchema = new Schema({
   teamId: {
@@ -1,17 +1,26 @@
 import { addLog } from '../../common/system/log';
-import mongoose, { Model } from 'mongoose';
+import mongoose, { Model, Mongoose } from 'mongoose';
 
 export default mongoose;
 export * from 'mongoose';
 
+export const MONGO_URL = process.env.MONGODB_URI as string;
+export const MONGO_LOG_URL = (process.env.MONGODB_LOG_URI ?? process.env.MONGODB_URI) as string;
+
 export const connectionMongo = (() => {
   if (!global.mongodb) {
-    global.mongodb = mongoose;
+    global.mongodb = new Mongoose();
   }
 
   return global.mongodb;
 })();
 
+export const connectionLogMongo = (() => {
+  if (!global.mongodbLog) {
+    global.mongodbLog = new Mongoose();
+  }
+  return global.mongodbLog;
+})();
+
 const addCommonMiddleware = (schema: mongoose.Schema) => {
   const operations = [
     /^find/,
@@ -71,6 +80,19 @@ export const getMongoModel = <T>(name: string, schema: mongoose.Schema) => {
   return model;
 };
 
+export const getMongoLogModel = <T>(name: string, schema: mongoose.Schema) => {
+  if (connectionLogMongo.models[name]) return connectionLogMongo.models[name] as Model<T>;
+  console.log('Load model======', name);
+  addCommonMiddleware(schema);
+
+  const model = connectionLogMongo.model<T>(name, schema);
+
+  // Sync index
+  syncMongoIndex(model);
+
+  return model;
+};
+
 const syncMongoIndex = async (model: Model<any>) => {
   if (process.env.SYNC_INDEX !== '0' && process.env.NODE_ENV !== 'test') {
     try {
@@ -1,6 +1,5 @@
 import { delay } from '@fastgpt/global/common/system/utils';
 import { addLog } from '../system/log';
-import { connectionMongo } from './index';
 import type { Mongoose } from 'mongoose';
 
 const maxConnecting = Math.max(30, Number(process.env.DB_MAX_LINK || 20));
@@ -8,41 +7,41 @@ const maxConnecting = Math.max(30, Number(process.env.DB_MAX_LINK || 20));
 /**
  * connect MongoDB and init data
  */
-export async function connectMongo(): Promise<Mongoose> {
+export async function connectMongo(db: Mongoose, url: string): Promise<Mongoose> {
   /* Connecting, connected will return */
-  if (connectionMongo.connection.readyState !== 0) {
-    return connectionMongo;
+  if (db.connection.readyState !== 0) {
+    return db;
   }
 
-  console.log('mongo start connect');
+  console.log('MongoDB start connect');
   try {
     // Remove existing listeners to prevent duplicates
-    connectionMongo.connection.removeAllListeners('error');
-    connectionMongo.connection.removeAllListeners('disconnected');
-    connectionMongo.set('strictQuery', 'throw');
+    db.connection.removeAllListeners('error');
+    db.connection.removeAllListeners('disconnected');
+    db.set('strictQuery', 'throw');
 
-    connectionMongo.connection.on('error', async (error) => {
+    db.connection.on('error', async (error) => {
       console.log('mongo error', error);
       try {
-        if (connectionMongo.connection.readyState !== 0) {
-          await connectionMongo.disconnect();
+        if (db.connection.readyState !== 0) {
+          await db.disconnect();
           await delay(1000);
-          await connectMongo();
+          await connectMongo(db, url);
         }
       } catch (error) {}
     });
-    connectionMongo.connection.on('disconnected', async () => {
+    db.connection.on('disconnected', async () => {
       console.log('mongo disconnected');
       try {
-        if (connectionMongo.connection.readyState !== 0) {
-          await connectionMongo.disconnect();
+        if (db.connection.readyState !== 0) {
+          await db.disconnect();
           await delay(1000);
-          await connectMongo();
+          await connectMongo(db, url);
         }
       } catch (error) {}
     });
 
-    await connectionMongo.connect(process.env.MONGODB_URI as string, {
+    const options = {
       bufferCommands: true,
       maxConnecting: maxConnecting,
       maxPoolSize: maxConnecting,
@@ -53,18 +52,18 @@ export async function connectMongo(): Promise<Mongoose> {
       maxIdleTimeMS: 300000,
       retryWrites: true,
       retryReads: true
+    };
 
-      // readPreference: 'secondaryPreferred',
-      // readConcern: { level: 'local' },
-      // writeConcern: { w: 'majority', j: true }
-    });
+    db.connect(url, options);
 
     console.log('mongo connected');
-    return connectionMongo;
+    return db;
   } catch (error) {
-    addLog.error('mongo connect error', error);
-    await connectionMongo.disconnect();
+    addLog.error('Mongo connect error', error);
+
+    await db.disconnect();
+
     await delay(1000);
-    return connectMongo();
+    return connectMongo(db, url);
   }
 }
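After this refactor, connectMongo no longer hard-codes the global connection: the caller passes a Mongoose instance and a URL. A plausible startup sketch, assuming the exports shown earlier in this diff (the bootstrap function itself is an assumption, not shown here):

import { connectionMongo, connectionLogMongo, MONGO_URL, MONGO_LOG_URL } from '../mongo'; // hypothetical relative path
import { connectMongo } from '../mongo/init'; // hypothetical relative path

// The business connection and the log connection are opened independently;
// MONGO_LOG_URL falls back to MONGODB_URI when MONGODB_LOG_URI is unset,
// so both may point at the same MongoDB instance.
export const initMongo = async () => {
  await connectMongo(connectionMongo, MONGO_URL);
  await connectMongo(connectionLogMongo, MONGO_LOG_URL);
};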
packages/service/common/mongo/type.d.ts
@@ -3,4 +3,5 @@ import type { Logger } from 'winston';
 
 declare global {
   var mongodb: Mongoose | undefined;
+  var mongodbLog: Mongoose | undefined;
 }
packages/service/common/redis/cache.ts (new file, 38 lines)
@@ -0,0 +1,38 @@
import { getGlobalRedisCacheConnection } from './index';
import { addLog } from '../system/log';
import { retryFn } from '@fastgpt/global/common/system/utils';

export enum CacheKeyEnum {
  team_vector_count = 'team_vector_count'
}

export const setRedisCache = async (
  key: string,
  data: string | Buffer | number,
  expireSeconds?: number
) => {
  return await retryFn(async () => {
    try {
      const redis = getGlobalRedisCacheConnection();

      if (expireSeconds) {
        await redis.set(key, data, 'EX', expireSeconds);
      } else {
        await redis.set(key, data);
      }
    } catch (error) {
      addLog.error('Set cache error:', error);
      return Promise.reject(error);
    }
  });
};

export const getRedisCache = async (key: string) => {
  const redis = getGlobalRedisCacheConnection();
  return await retryFn(() => redis.get(key));
};

export const delRedisCache = async (key: string) => {
  const redis = getGlobalRedisCacheConnection();
  await retryFn(() => redis.del(key));
};
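A small usage sketch of the cache helpers above, assuming the exports from this new file (the key suffix and value are arbitrary examples):

import { setRedisCache, getRedisCache, delRedisCache, CacheKeyEnum } from './cache'; // hypothetical relative path

// Example: cache a per-team counter for 30 minutes, read it back, then invalidate it.
const cacheDemo = async (teamId: string) => {
  const key = `${CacheKeyEnum.team_vector_count}:${teamId}`;

  await setRedisCache(key, 42, 30 * 60);
  const cached = await getRedisCache(key); // '42' or null — ioredis returns strings
  await delRedisCache(key);
  return cached;
};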
packages/service/common/redis/index.ts (new file, 43 lines)
@@ -0,0 +1,43 @@
import { addLog } from '../system/log';
import Redis from 'ioredis';

const REDIS_URL = process.env.REDIS_URL ?? 'redis://localhost:6379';

export const newQueueRedisConnection = () => {
  const redis = new Redis(REDIS_URL);
  redis.on('connect', () => {
    console.log('Redis connected');
  });
  redis.on('error', (error) => {
    console.error('Redis connection error', error);
  });
  return redis;
};

export const newWorkerRedisConnection = () => {
  const redis = new Redis(REDIS_URL, {
    maxRetriesPerRequest: null
  });
  redis.on('connect', () => {
    console.log('Redis connected');
  });
  redis.on('error', (error) => {
    console.error('Redis connection error', error);
  });
  return redis;
};

export const getGlobalRedisCacheConnection = () => {
  if (global.redisCache) return global.redisCache;

  global.redisCache = new Redis(REDIS_URL, { keyPrefix: 'fastgpt:cache:' });

  global.redisCache.on('connect', () => {
    addLog.info('Redis connected');
  });
  global.redisCache.on('error', (error) => {
    addLog.error('Redis connection error', error);
  });

  return global.redisCache;
};
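Worth noting: the worker connection is created with maxRetriesPerRequest: null, which BullMQ expects for the blocking connections its workers hold, while the queue connection keeps the ioredis defaults; the shared cache connection additionally namespaces every key under the fastgpt:cache: prefix.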
packages/service/common/redis/type.d.ts (new file, 5 lines)
@@ -0,0 +1,5 @@
import Redis from 'ioredis';

declare global {
  var redisCache: Redis | null;
}
@@ -1,4 +1,4 @@
-import { getMongoModel, Schema } from '../../../common/mongo';
+import { getMongoLogModel as getMongoModel, Schema } from '../../../common/mongo';
 import { SystemLogType } from './type';
 import { LogLevelEnum } from './constant';
 
@@ -1,5 +1,5 @@
 export enum TimerIdEnum {
-  checkInValidDatasetFiles = 'checkInValidDatasetFiles',
+  checkExpiredFiles = 'checkExpiredFiles',
   checkInvalidDatasetData = 'checkInvalidDatasetData',
   checkInvalidVector = 'checkInvalidVector',
   clearExpiredSubPlan = 'clearExpiredSubPlan',
@@ -2,10 +2,12 @@
 import { PgVectorCtrl } from './pg/class';
 import { ObVectorCtrl } from './oceanbase/class';
 import { getVectorsByText } from '../../core/ai/embedding';
-import { InsertVectorProps } from './controller.d';
+import { DelDatasetVectorCtrlProps, InsertVectorProps } from './controller.d';
 import { EmbeddingModelItemType } from '@fastgpt/global/core/ai/model.d';
 import { MILVUS_ADDRESS, PG_ADDRESS, OCEANBASE_ADDRESS } from './constants';
 import { MilvusCtrl } from './milvus/class';
+import { setRedisCache, getRedisCache, delRedisCache, CacheKeyEnum } from '../redis/cache';
+import { throttle } from 'lodash';
 
 const getVectorObj = () => {
   if (PG_ADDRESS) return new PgVectorCtrl();
@@ -15,13 +17,33 @@ const getVectorObj = () => {
   return new PgVectorCtrl();
 };
 
+const getChcheKey = (teamId: string) => `${CacheKeyEnum.team_vector_count}:${teamId}`;
+const onDelCache = throttle((teamId: string) => delRedisCache(getChcheKey(teamId)), 30000, {
+  leading: true,
+  trailing: true
+});
+
 const Vector = getVectorObj();
 
 export const initVectorStore = Vector.init;
-export const deleteDatasetDataVector = Vector.delete;
 export const recallFromVectorStore = Vector.embRecall;
 export const getVectorDataByTime = Vector.getVectorDataByTime;
-export const getVectorCountByTeamId = Vector.getVectorCountByTeamId;
+
+export const getVectorCountByTeamId = async (teamId: string) => {
+  const key = getChcheKey(teamId);
+
+  const countStr = await getRedisCache(key);
+  if (countStr) {
+    return Number(countStr);
+  }
+
+  const count = await Vector.getVectorCountByTeamId(teamId);
+
+  await setRedisCache(key, count, 30 * 60);
+
+  return count;
+};
+
 export const getVectorCountByDatasetId = Vector.getVectorCountByDatasetId;
 export const getVectorCountByCollectionId = Vector.getVectorCountByCollectionId;
 
@@ -43,8 +65,16 @@ export const insertDatasetDataVector = async ({
     vector: vectors[0]
   });
 
+  onDelCache(props.teamId);
+
   return {
     tokens,
     insertId
   };
 };
+
+export const deleteDatasetDataVector = async (props: DelDatasetVectorCtrlProps) => {
+  const result = await Vector.delete(props);
+  onDelCache(props.teamId);
+  return result;
+};
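Taken together, the team vector count is now read through Redis: a warm key returns immediately, a miss falls back to the vector store and is cached for 30 minutes, and inserts or deletes invalidate the cached count via the throttled onDelCache (at most once per 30 seconds per process). A hypothetical call site is unchanged:

import { getVectorCountByTeamId } from './controller'; // hypothetical relative path

// Same signature as before; warm calls are served from Redis instead of the vector database.
const getTeamVectorUsage = (teamId: string) => getVectorCountByTeamId(teamId);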