V4.9.11 feature (#4969)
* Feat: Images dataset collection (#4941)
  * New pic (#4858)
    * Update dataset-related types: add image file ID and preview URL support; improve the dataset import flow with a new image-dataset handling component; fix some i18n text; update the file upload logic to support the new feature.
    * Differences from the original code.
    * Add V4.9.10 release notes: support the PG `systemEnv.hnswMaxScanTuples` setting, optimize LLM stream call timeouts, and fix full-text search ordering across multiple knowledge bases. Also update the dataset index, removing the datasetId field to simplify queries.
    * Switch to the fileId_image logic and add training-queue matching logic.
    * Add image-collection detection and streamline preview URL generation so a preview URL is only generated when the dataset is an image collection; add related debug logging.
    * Refactor Docker Compose configuration to comment out exposed ports for production environments, update image versions for pgvector, fastgpt, and mcp_server, and enhance the Redis service with a health check. Additionally, standardize dataset collection labels in constants and improve internationalization strings across multiple languages.
    * Enhance the TrainingStates component by adding internationalization support for the imageParse training mode, and update defaultCounts to include imageParse mode in the trainingDetail API.
    * Enhance the dataset import context by adding extra steps for the image dataset import process, and improve internationalization strings for modal buttons in the useEditTitle hook.
    * Update DatasetImportContext to conditionally render the MyStep component based on data source type, improving the import process for non-image datasets.
    * Refactor image dataset handling by improving internationalization strings, enhancing error messages, and streamlining the preview URL generation process.
    * Upload images into the new dataset_collection_images table and adjust the logic to match.
    * Fix issues in all parts other than the controller.
    * Consolidate the image-dataset logic into the controller.
    * Add i18n strings.
    * Add more i18n strings.
    * Resolve review comments: mainly upload-logic changes and component reuse.
    * Show an icon next to image names.
    * Fix naming issues that caused build errors.
    * Remove the unneeded collectionId parts.
    * Clean up redundant files and adjust a delete button.
    * Resolve everything except loading and the unified imageId.
    * Fix icon errors.
    * Reuse MyPhotoView and rename imageFileId to imageId via a global replace.
    * Revert unnecessary file changes.
    * Error and field fixes.
    * Delete the temporary file after a successful upload, and revert some changes.
    * Remove the path field, store images in GridFS, and update the create/delete code accordingly.
    * Fix build errors.
    ---------
    Co-authored-by: archer <545436317@qq.com>
  * perf: image dataset
  * feat: insert image
  * perf: image icon
  * fix: training state
  ---------
  Co-authored-by: Zhuangzai fa <143257420+ctrlz526@users.noreply.github.com>
* fix: ts (#4948)
* Thirddatasetmd (#4942)
  * add thirddataset.md
  * fix thirddataset.md
  * fix
  * delete wrong png
  ---------
  Co-authored-by: dreamer6680 <146868355@qq.com>
* perf: api dataset code
* perf: log
* add secondary.tsx (#4946)
  * add secondary.tsx
  * fix
  ---------
  Co-authored-by: dreamer6680 <146868355@qq.com>
* perf: multiple menu
* perf: i18n
* feat: parse queue (#4960)
  * feat: parse queue
  * feat: sync parse queue
* fix thirddataset.md (#4962)
* fix thirddataset-4.png (#4963)
* feat: Dataset template import (#4934)
  * Template import done except for the documentation.
  * Fix template-import build errors.
  * Document production
  * compress pictures
  * Change some constants to variables
  ---------
  Co-authored-by: Archer <545436317@qq.com>
* perf: template import
* doc
* llm paragraph
* bocha tool
* fix: del collection
---------
Co-authored-by: Zhuangzai fa <143257420+ctrlz526@users.noreply.github.com>
Co-authored-by: dreamer6680 <1468683855@qq.com>
Co-authored-by: dreamer6680 <146868355@qq.com>
packages/service/core/dataset/image/controller.ts (new file, 166 lines)
@@ -0,0 +1,166 @@
import { addMinutes } from 'date-fns';
import { bucketName, MongoDatasetImageSchema } from './schema';
import { connectionMongo, Types } from '../../../common/mongo';
import fs from 'fs';
import type { FileType } from '../../../common/file/multer';
import fsp from 'fs/promises';
import { computeGridFsChunSize } from '../../../common/file/gridfs/utils';
import { setCron } from '../../../common/system/cron';
import { checkTimerLock } from '../../../common/system/timerLock/utils';
import { TimerIdEnum } from '../../../common/system/timerLock/constants';
import { addLog } from '../../../common/system/log';

const getGridBucket = () => {
  return new connectionMongo.mongo.GridFSBucket(connectionMongo.connection.db!, {
    bucketName: bucketName
  });
};

export const createDatasetImage = async ({
  teamId,
  datasetId,
  file,
  expiredTime = addMinutes(new Date(), 30)
}: {
  teamId: string;
  datasetId: string;
  file: FileType;
  expiredTime?: Date;
}): Promise<{ imageId: string; previewUrl: string }> => {
  const path = file.path;
  const gridBucket = getGridBucket();
  const metadata = {
    teamId: String(teamId),
    datasetId: String(datasetId),
    expiredTime
  };

  const stats = await fsp.stat(path);
  if (!stats.isFile()) return Promise.reject(`${path} is not a file`);

  const readStream = fs.createReadStream(path, {
    highWaterMark: 256 * 1024
  });
  const chunkSizeBytes = computeGridFsChunSize(stats.size);

  const stream = gridBucket.openUploadStream(file.originalname, {
    metadata,
    contentType: file.mimetype,
    chunkSizeBytes
  });

  // Save to GridFS
  await new Promise((resolve, reject) => {
    readStream
      .pipe(stream as any)
      .on('finish', resolve)
      .on('error', reject);
  });

  return {
    imageId: String(stream.id),
    previewUrl: ''
  };
};

export const getDatasetImageReadData = async (imageId: string) => {
  // Get file metadata to get contentType
  const fileInfo = await MongoDatasetImageSchema.findOne({
    _id: new Types.ObjectId(imageId)
  }).lean();
  if (!fileInfo) {
    return Promise.reject('Image not found');
  }

  const gridBucket = getGridBucket();
  return {
    stream: gridBucket.openDownloadStream(new Types.ObjectId(imageId)),
    fileInfo
  };
};

export const getDatasetImageBase64 = async (imageId: string) => {
  // Get file metadata to get contentType
  const fileInfo = await MongoDatasetImageSchema.findOne({
    _id: new Types.ObjectId(imageId)
  }).lean();
  if (!fileInfo) {
    return Promise.reject('Image not found');
  }

  // Get image stream from GridFS
  const { stream } = await getDatasetImageReadData(imageId);

  // Convert stream to buffer
  const chunks: Buffer[] = [];

  return new Promise<string>((resolve, reject) => {
    stream.on('data', (chunk: Buffer) => {
      chunks.push(chunk);
    });

    stream.on('end', () => {
      // Combine all chunks into a single buffer
      const buffer = Buffer.concat(chunks);
      // Convert buffer to base64 string
      const base64 = buffer.toString('base64');
      const dataUrl = `data:${fileInfo.contentType || 'image/jpeg'};base64,${base64}`;
      resolve(dataUrl);
    });

    stream.on('error', reject);
  });
};

export const deleteDatasetImage = async (imageId: string) => {
  const gridBucket = getGridBucket();

  try {
    await gridBucket.delete(new Types.ObjectId(imageId));
  } catch (error: any) {
    const msg = error?.message;
    // Deleting an already-removed file is only worth a warning
    if (typeof msg === 'string' && msg.includes('File not found')) {
      addLog.warn('Delete dataset image error', error);
      return;
    } else {
      return Promise.reject(error);
    }
  }
};

export const clearExpiredDatasetImageCron = async () => {
  const gridBucket = getGridBucket();
  const clearExpiredDatasetImages = async () => {
    addLog.debug('Clear expired dataset image start');

    const data = await MongoDatasetImageSchema.find(
      {
        'metadata.expiredTime': { $lt: new Date() }
      },
      '_id'
    ).lean();

    for (const item of data) {
      try {
        await gridBucket.delete(item._id);
      } catch (error) {
        addLog.error('Delete expired dataset image error', error);
      }
    }
    addLog.debug('Clear expired dataset image end');
  };

  // Every 10 minutes, one instance (guarded by the timer lock) reaps expired images
  setCron('*/10 * * * *', async () => {
    if (
      await checkTimerLock({
        timerId: TimerIdEnum.clearExpiredDatasetImage,
        lockMinuted: 9
      })
    ) {
      try {
        await clearExpiredDatasetImages();
      } catch (error) {
        addLog.error('clearExpiredDatasetImageCron error', error);
      }
    }
  });
};
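For orientation, here is how the upload path above is typically driven end to end. A minimal sketch, assuming a multer-backed route handler; handleImageUpload is hypothetical, while FileType and createDatasetImage come from the diff above. Per the commit notes, the temporary file is deleted once the upload succeeds.

import fsp from 'fs/promises';
import type { FileType } from '../../../common/file/multer';
import { createDatasetImage } from './controller';

// Hypothetical upload handler: `file` is produced by the multer middleware.
async function handleImageUpload(teamId: string, datasetId: string, file: FileType) {
  try {
    // Stream the temp file into GridFS. The record carries a 30-minute
    // expiredTime by default and is reaped by the cron above unless a
    // collection later claims it (see removeDatasetImageExpiredTime in utils.ts).
    const { imageId } = await createDatasetImage({ teamId, datasetId, file });
    return imageId;
  } finally {
    // Commit notes: remove the multer temp file after the upload finishes.
    await fsp.unlink(file.path).catch(() => {});
  }
}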
packages/service/core/dataset/image/schema.ts (new file, 36 lines)
@@ -0,0 +1,36 @@
import type { Types } from '../../../common/mongo';
import { getMongoModel, Schema } from '../../../common/mongo';

export const bucketName = 'dataset_image';

const MongoDatasetImage = new Schema({
  length: { type: Number, required: true },
  chunkSize: { type: Number, required: true },
  uploadDate: { type: Date, required: true },
  filename: { type: String, required: true },
  contentType: { type: String, required: true },
  metadata: {
    teamId: { type: String, required: true },
    datasetId: { type: String, required: true },
    collectionId: { type: String },
    expiredTime: { type: Date, required: true }
  }
});
MongoDatasetImage.index({ 'metadata.datasetId': 'hashed' });
MongoDatasetImage.index({ 'metadata.collectionId': 'hashed' });
MongoDatasetImage.index({ 'metadata.expiredTime': -1 });

export const MongoDatasetImageSchema = getMongoModel<{
  _id: Types.ObjectId;
  length: number;
  chunkSize: number;
  uploadDate: Date;
  filename: string;
  contentType: string;
  metadata: {
    teamId: string;
    datasetId: string;
    collectionId: string;
    expiredTime: Date;
  };
}>(`${bucketName}.files`, MongoDatasetImage);
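Note the model is registered on `${bucketName}.files`, i.e. dataset_image.files, the same metadata collection GridFSBucket writes to, so queries through this model see exactly the files the bucket manages. A small illustrative lookup served by the hashed metadata.collectionId index declared above (listCollectionImageIds is hypothetical):

import { MongoDatasetImageSchema } from './schema';

// Illustrative only: list the image ids attached to one collection,
// using the hashed 'metadata.collectionId' index.
async function listCollectionImageIds(collectionId: string) {
  const images = await MongoDatasetImageSchema.find(
    { 'metadata.collectionId': String(collectionId) },
    '_id'
  ).lean();
  return images.map((image) => String(image._id));
}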
packages/service/core/dataset/image/utils.ts (new file, 103 lines)
@@ -0,0 +1,103 @@
import { ERROR_ENUM } from '@fastgpt/global/common/error/errorCode';
import { Types, type ClientSession } from '../../../common/mongo';
import { deleteDatasetImage } from './controller';
import { MongoDatasetImageSchema } from './schema';
import { addMinutes } from 'date-fns';
import jwt from 'jsonwebtoken';

// Once a collection claims uploaded images, drop their expiration so the
// cleanup cron no longer deletes them, and bind them to the collection.
export const removeDatasetImageExpiredTime = async ({
  ids = [],
  collectionId,
  session
}: {
  ids?: string[];
  collectionId: string;
  session?: ClientSession;
}) => {
  if (ids.length === 0) return;
  return MongoDatasetImageSchema.updateMany(
    {
      _id: {
        $in: ids
          .filter((id) => Types.ObjectId.isValid(id))
          .map((id) => (typeof id === 'string' ? new Types.ObjectId(id) : id))
      }
    },
    {
      $unset: { 'metadata.expiredTime': '' },
      $set: {
        'metadata.collectionId': String(collectionId)
      }
    },
    { session }
  );
};

// Sign a short-lived preview URL for an image.
export const getDatasetImagePreviewUrl = ({
  imageId,
  teamId,
  datasetId,
  expiredMinutes
}: {
  imageId: string;
  teamId: string;
  datasetId: string;
  expiredMinutes: number;
}) => {
  const expiredTime = Math.floor(addMinutes(new Date(), expiredMinutes).getTime() / 1000);

  const key = (process.env.FILE_TOKEN_KEY as string) ?? 'filetoken';
  const token = jwt.sign(
    {
      teamId: String(teamId),
      datasetId: String(datasetId),
      exp: expiredTime
    },
    key
  );

  return `/api/core/dataset/image/${imageId}?token=${token}`;
};

// Verify the token attached to a preview URL.
export const authDatasetImagePreviewUrl = (token?: string) =>
  new Promise<{
    teamId: string;
    datasetId: string;
  }>((resolve, reject) => {
    if (!token) {
      return reject(ERROR_ENUM.unAuthFile);
    }
    const key = (process.env.FILE_TOKEN_KEY as string) ?? 'filetoken';

    jwt.verify(token, key, (err, decoded: any) => {
      if (err || !decoded?.teamId || !decoded?.datasetId) {
        reject(ERROR_ENUM.unAuthFile);
        return;
      }
      resolve({
        teamId: decoded.teamId,
        datasetId: decoded.datasetId
      });
    });
  });

export const clearDatasetImages = async (datasetIds: string[]) => {
  if (datasetIds.length === 0) return;
  const images = await MongoDatasetImageSchema.find(
    {
      'metadata.datasetId': { $in: datasetIds.map((item) => String(item)) }
    },
    '_id'
  ).lean();
  await Promise.all(images.map((image) => deleteDatasetImage(String(image._id))));
};

export const clearCollectionImages = async (collectionIds: string[]) => {
  if (collectionIds.length === 0) return;
  const images = await MongoDatasetImageSchema.find(
    {
      'metadata.collectionId': { $in: collectionIds.map((item) => String(item)) }
    },
    '_id'
  ).lean();
  await Promise.all(images.map((image) => deleteDatasetImage(String(image._id))));
};
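Taken together, the preview URL flow is a short-lived JWT roundtrip: getDatasetImagePreviewUrl signs a token on the way out, and the image route verifies it before streaming from GridFS. A minimal sketch of that route, assuming an Express-style response object; serveDatasetImage is hypothetical, while the real endpoint lives at /api/core/dataset/image/<imageId>:

import { getDatasetImageReadData } from './controller';
import { authDatasetImagePreviewUrl } from './utils';

// Hypothetical handler for GET /api/core/dataset/image/:imageId?token=...
async function serveDatasetImage(imageId: string, token: string | undefined, res: any) {
  // Rejects with ERROR_ENUM.unAuthFile when the token is missing,
  // expired, or malformed.
  await authDatasetImagePreviewUrl(token);
  const { stream, fileInfo } = await getDatasetImageReadData(imageId);
  res.setHeader('Content-Type', fileInfo.contentType || 'image/jpeg');
  stream.pipe(res);
}

Note the token binds only teamId and datasetId, not the specific imageId, so any preview URL minted for that dataset remains valid until its token expires.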