4.6.4-alpha (#582)

This commit is contained in:
Archer
2023-12-08 15:01:11 +08:00
committed by GitHub
parent 54d52d8d25
commit b58249fc3a
66 changed files with 962 additions and 527 deletions

View File

@@ -3,6 +3,7 @@ import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
import fsp from 'fs/promises';
import fs from 'fs';
import { DatasetFileSchema } from '@fastgpt/global/core/dataset/type';
import { delImgByFileIdList } from '../image/controller';
export function getGFSCollection(bucket: `${BucketNameEnum}`) {
return connectionMongo.connection.db.collection(`${bucket}.files`);
@@ -69,24 +70,65 @@ export async function getFileById({
_id: new Types.ObjectId(fileId)
});
if (!file) {
return Promise.reject('File not found');
}
// if (!file) {
// return Promise.reject('File not found');
// }
return file;
return file || undefined;
}
export async function delFileById({
export async function delFileByFileIdList({
bucketName,
fileId
fileIdList,
retry = 3
}: {
bucketName: `${BucketNameEnum}`;
fileId: string;
fileIdList: string[];
retry?: number;
}): Promise<any> {
try {
const bucket = getGridBucket(bucketName);
await Promise.all(fileIdList.map((id) => bucket.delete(new Types.ObjectId(id))));
} catch (error) {
if (retry > 0) {
return delFileByFileIdList({ bucketName, fileIdList, retry: retry - 1 });
}
}
}
// delete file by metadata(datasetId)
/**
 * Delete every GridFS file (and its associated images) matching a dataset.
 *
 * @param bucketName GridFS bucket to search.
 * @param datasetId  Dataset to scope the deletion to.
 *
 * NOTE(review): when `datasetId` is omitted the filter is empty and EVERY
 * file in the bucket matches — confirm callers intend that before relying
 * on the optional parameter.
 */
export async function delFileByMetadata({
  bucketName,
  datasetId
}: {
  bucketName: `${BucketNameEnum}`;
  datasetId?: string;
}) {
  const bucket = getGridBucket(bucketName);

  // Fetch matching ids only — the projection keeps the payload small.
  const files = await bucket
    .find(
      {
        ...(datasetId && { 'metadata.datasetId': datasetId })
      },
      {
        projection: {
          _id: 1
        }
      }
    )
    .toArray();

  const idList = files.map((item) => String(item._id));
  // Nothing matched — skip the two delete round trips (behavior-identical:
  // `$in: []` and `Promise.all([])` are both no-ops).
  if (idList.length === 0) return;

  // delete img
  await delImgByFileIdList(idList);
  // delete file
  await delFileByFileIdList({
    bucketName,
    fileIdList: idList
  });
}
export async function getDownloadStream({

View File

@@ -1,3 +1,4 @@
import { UploadImgProps } from '@fastgpt/global/common/file/api';
import { imageBaseUrl } from './constant';
import { MongoImage } from './schema';
@@ -9,11 +10,10 @@ export const maxImgSize = 1024 * 1024 * 12;
export async function uploadMongoImg({
base64Img,
teamId,
expiredTime
}: {
base64Img: string;
expiredTime,
metadata
}: UploadImgProps & {
teamId: string;
expiredTime?: Date;
}) {
if (base64Img.length > maxImgSize) {
return Promise.reject('Image too large');
@@ -24,7 +24,8 @@ export async function uploadMongoImg({
const { _id } = await MongoImage.create({
teamId,
binary: Buffer.from(base64Data, 'base64'),
expiredTime
expiredTime: expiredTime,
metadata
});
return getMongoImgUrl(String(_id));
@@ -37,3 +38,9 @@ export async function readMongoImg({ id }: { id: string }) {
}
return data?.binary;
}
/**
 * Remove every stored image whose `metadata.fileId` matches one of the
 * given file ids. Ids are normalized to strings to match the stored form.
 */
export async function delImgByFileIdList(fileIds: string[]) {
  const normalizedIds = fileIds.map((fileId) => String(fileId));
  return MongoImage.deleteMany({ 'metadata.fileId': { $in: normalizedIds } });
}

View File

@@ -5,13 +5,17 @@ const { Schema, model, models } = connectionMongo;
const ImageSchema = new Schema({
teamId: {
type: Schema.Types.ObjectId,
ref: TeamCollectionName
ref: TeamCollectionName,
required: true
},
binary: {
type: Buffer
},
expiredTime: {
type: Date
},
metadata: {
type: Object
}
});
@@ -21,7 +25,7 @@ try {
console.log(error);
}
export const MongoImage: Model<{ teamId: string; binary: Buffer }> =
export const MongoImage: Model<{ teamId: string; binary: Buffer; metadata?: Record<string, any> }> =
models['image'] || model('image', ImageSchema);
MongoImage.syncIndexes();

View File

@@ -82,7 +82,7 @@ export const sseErrRes = (res: NextApiResponse, error: any) => {
} else if (error?.response?.data?.error?.message) {
msg = error?.response?.data?.error?.message;
} else if (error?.error?.message) {
msg = error?.error?.message;
msg = `${error?.error?.code} ${error?.error?.message}`;
}
addLog.error(`sse error: ${msg}`, error);