V4.14.4 features (#6036)

* feat: add query optimize and bill (#6021)

* add query optimize and bill

* perf: query extension

* fix: embedding model

* remove log

* remove log

* fix: test

---------

Co-authored-by: xxyyh <2289112474@qq>
Co-authored-by: archer <545436317@qq.com>

* feat: notice (#6013)

* feat: record user's language

* feat: notice points/dataset indexes; support count limit; update docker-compose.yml

* fix: ts error

* feat: send auth code i18n

* chore: dataset notice limit

* chore: adjust

* fix: ts

* fix: countLimit race condition; i18n en-prefix locale fallback to en
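
The en-prefix fallback fix is a small normalize-then-fall-back step. A minimal sketch, assuming a supported-locale set and a default that this excerpt does not show:

```ts
// Illustrative names only; not FastGPT's actual helpers.
const SUPPORTED_LOCALES = new Set(['en', 'zh-CN', 'zh-Hant']); // assumed list
const DEFAULT_LOCALE = 'zh-CN'; // assumed default

export function resolveLocale(requested: string): string {
  if (SUPPORTED_LOCALES.has(requested)) return requested;
  // The fix: en-prefixed locales (en-US, en-GB, ...) fall back to plain `en`.
  if (/^en([-_]|$)/i.test(requested)) return 'en';
  return DEFAULT_LOCALE;
}
```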

---------

Co-authored-by: archer <545436317@qq.com>

* perf: comment

* perf: send inform code

* fix: type error (#6029)

* feat: add ip region for chat logs (#6010)

* feat: add ip region for chat logs

* refactor: use GeoLite2.mmdb (see the lookup sketch below)

* fix: export chat logs

* fix: return location directly

* test: add unit test

* perf: log show ip data
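
GeoLite2 ships as a binary `.mmdb` database; in Node it is commonly read with the `maxmind` package. A sketch under that assumption — the repo's actual reader and database path are not shown in this excerpt:

```ts
import maxmind, { type CityResponse } from 'maxmind';

// Open the database once and reuse the reader; the path is a placeholder.
const readerPromise = maxmind.open<CityResponse>('./GeoLite2-City.mmdb');

export async function getIpRegion(ip: string): Promise<string> {
  const reader = await readerPromise;
  const hit = reader.get(ip); // null for private/unknown addresses
  if (!hit) return '';
  const country = hit.country?.names?.en ?? '';
  const city = hit.city?.names?.en ?? '';
  return [country, city].filter(Boolean).join(' / ');
}
```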

* adjust commercial plans (#6008)

* plan frontend

* plan limit

* coupon

* discount coupon

* fix

* type

* fix audit

* type

* plan name

* legacy plan

* track

* feat: add discount coupon

* fix

* fix discount coupon

* openapi

* type

* type

* env

* api type

* fix

* fix: simple agent plugin input & agent dashboard card (#6034)

* refactor: remove gridfs (#6031)

* fix: replace gridfs multer operations with s3 compatible ops (generic upload sketch below)

* wip: s3 features

* refactor: remove gridfs

* fix

* perf: mock test

* doc

* doc

* doc

* fix: test

* fix: s3

* fix: mock s3

* remove invalid config
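
The gridfs removal replaces Mongo-backed file streams with object-storage calls behind `getS3DatasetSource`. That wrapper is not part of this excerpt, so the following is only a generic sketch using the AWS v3 SDK against an S3-compatible endpoint such as MinIO; the bucket name, key scheme, and env vars are assumptions:

```ts
import fs from 'fs';
import { S3Client } from '@aws-sdk/client-s3';
import { Upload } from '@aws-sdk/lib-storage';

// S3-compatible endpoint (e.g. MinIO); all values are placeholders.
const s3 = new S3Client({
  region: 'us-east-1',
  endpoint: process.env.S3_ENDPOINT,
  forcePathStyle: true, // most self-hosted S3 services need path-style URLs
  credentials: {
    accessKeyId: process.env.S3_ACCESS_KEY ?? '',
    secretAccessKey: process.env.S3_SECRET_KEY ?? ''
  }
});

// Streamed upload; Upload handles multipart and unknown content length,
// which is what the old GridFS openUploadStream path provided.
export async function uploadDatasetFile(path: string, key: string, contentType: string) {
  await new Upload({
    client: s3,
    params: {
      Bucket: 'dataset', // assumed bucket
      Key: key,
      Body: fs.createReadStream(path),
      ContentType: contentType
    }
  }).done();
  return key;
}
```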

* fix: init query extension

* initv4144 (#6037)

* chore: initv4144

* fix

* version

* fix: new plans (#6039)

* fix: new plans

* qr modal tip

* fix: buffer raw text filename (#6040)
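
The change behind this fix is not reproduced in the excerpt. Purely as illustration, the usual filename-encoding repair in Node upload stacks (multer hands names over as latin1 bytes) looks like:

```ts
// Re-decode a filename that arrived as latin1 so non-ASCII characters survive.
// Hypothetical helper; not necessarily what this commit changed.
export function decodeUploadFilename(raw: string): string {
  return Buffer.from(raw, 'latin1').toString('utf8');
}
```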

* fix: initv4144 (#6041)

* fix: pay refresh (#6042)

* fix: migration shell

* rename collection

* clear timerlock

* clear timerlock

* perf: faq

* perf: bill schema

* fix: openapi

* doc

* fix: share var render

* feat: delete dataset queue

* plan usage display (#6043)

* plan usage display

* text

* fix

* fix: ts

* perf: remove invalid code

* perf: init shell

* doc

* perf: rename field

* perf: avatar presign
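
Avatar presigning normally means handing the client a short-lived signed GET URL instead of proxying the object through the server. A generic sketch with `@aws-sdk/s3-request-presigner`; bucket, key, and TTL are assumptions:

```ts
import { GetObjectCommand, S3Client } from '@aws-sdk/client-s3';
import { getSignedUrl } from '@aws-sdk/s3-request-presigner';

const s3 = new S3Client({ region: 'us-east-1' }); // endpoint/credentials as configured

// Returns a time-limited URL the browser can fetch directly.
export async function presignAvatar(key: string): Promise<string> {
  return getSignedUrl(s3, new GetObjectCommand({ Bucket: 'avatar', Key: key }), {
    expiresIn: 60 * 10 // 10 minutes, assumed TTL
  });
}
```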

* init

* custom plan text (#6045)

* fix plans

* fix

* fixed

* computed

---------

Co-authored-by: archer <545436317@qq.com>

* init shell

* plan text & price page back button (#6046)

* init

* index

* delete dataset

* delete dataset

* perf: delete dataset

* init

---------

Co-authored-by: YeYuheng <57035043+YYH211@users.noreply.github.com>
Co-authored-by: xxyyh <2289112474@qq>
Co-authored-by: Finley Ge <32237950+FinleyGe@users.noreply.github.com>
Co-authored-by: Roy <whoeverimf5@gmail.com>
Co-authored-by: heheer <heheer@sealos.io>
Author: Archer
Date: 2025-12-08 01:44:15 +08:00
Committed by: GitHub
Commit: 2ccb5b50c6 (parent: 9d72f238c0)
247 changed files with 7342 additions and 3819 deletions
@@ -1,17 +1,6 @@
import { addMinutes } from 'date-fns';
import { bucketName, MongoDatasetImageSchema } from './schema';
import { connectionMongo, Types } from '../../../common/mongo';
import fs from 'fs';
import type { FileType } from '../../../common/file/multer';
import fsp from 'fs/promises';
import { computeGridFsChunSize } from '../../../common/file/gridfs/utils';
import { setCron } from '../../../common/system/cron';
import { checkTimerLock } from '../../../common/system/timerLock/utils';
import { TimerIdEnum } from '../../../common/system/timerLock/constants';
import { addLog } from '../../../common/system/log';
import { UserError } from '@fastgpt/global/common/error/utils';
import { getS3DatasetSource, S3DatasetSource } from '../../../common/s3/sources/dataset';
import { isS3ObjectKey } from '../../../common/s3/utils';
const getGridBucket = () => {
  return new connectionMongo.mongo.GridFSBucket(connectionMongo.connection.db!, {
@@ -19,53 +8,6 @@ const getGridBucket = () => {
  });
};
export const createDatasetImage = async ({
  teamId,
  datasetId,
  file,
  expiredTime = addMinutes(new Date(), 30)
}: {
  teamId: string;
  datasetId: string;
  file: FileType;
  expiredTime?: Date;
}): Promise<{ imageId: string; previewUrl: string }> => {
  const path = file.path;
  const gridBucket = getGridBucket();
  const metadata = {
    teamId: String(teamId),
    datasetId: String(datasetId),
    expiredTime
  };
  const stats = await fsp.stat(path);
  if (!stats.isFile()) return Promise.reject(`${path} is not a file`);
  const readStream = fs.createReadStream(path, {
    highWaterMark: 256 * 1024
  });
  const chunkSizeBytes = computeGridFsChunSize(stats.size);
  const stream = gridBucket.openUploadStream(file.originalname, {
    metadata,
    contentType: file.mimetype,
    chunkSizeBytes
  });
  // save to gridfs
  await new Promise((resolve, reject) => {
    readStream
      .pipe(stream as any)
      .on('finish', resolve)
      .on('error', reject);
  });
  return {
    imageId: String(stream.id),
    previewUrl: ''
  };
};
export const getDatasetImageReadData = async (imageId: string) => {
  // Get file metadata to get contentType
  const fileInfo = await MongoDatasetImageSchema.findOne({
@@ -81,93 +23,3 @@ export const getDatasetImageReadData = async (imageId: string) => {
    fileInfo
  };
};
export const getDatasetImageBase64 = async (imageId: string) => {
  // Get file metadata to get contentType
  const fileInfo = await MongoDatasetImageSchema.findOne({
    _id: new Types.ObjectId(imageId)
  }).lean();
  if (!fileInfo) {
    return Promise.reject(new UserError('Image not found'));
  }
  // Get image stream from GridFS
  const { stream } = await getDatasetImageReadData(imageId);
  // Convert stream to buffer
  const chunks: Buffer[] = [];
  return new Promise<string>((resolve, reject) => {
    stream.on('data', (chunk: Buffer) => {
      chunks.push(chunk);
    });
    stream.on('end', () => {
      // Combine all chunks into a single buffer
      const buffer = Buffer.concat(chunks);
      // Convert buffer to base64 string
      const base64 = buffer.toString('base64');
      const dataUrl = `data:${fileInfo.contentType || 'image/jpeg'};base64,${base64}`;
      resolve(dataUrl);
    });
    stream.on('error', reject);
  });
};
export const deleteDatasetImage = async (imageId: string) => {
  const gridBucket = getGridBucket();
  try {
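    // imageId may be an S3 object key or a legacy GridFS ObjectId; route to the matching store.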
    if (isS3ObjectKey(imageId, 'dataset')) {
      await getS3DatasetSource().deleteDatasetFileByKey(imageId);
    } else {
      await gridBucket.delete(new Types.ObjectId(imageId));
    }
  } catch (error: any) {
    const msg = error?.message;
    if (msg?.includes('File not found')) {
      addLog.warn('Delete dataset image error', error);
      return;
    } else {
      return Promise.reject(error);
    }
  }
};
export const clearExpiredDatasetImageCron = async () => {
  const gridBucket = getGridBucket();
  const clearExpiredDatasetImages = async () => {
    addLog.debug('Clear expired dataset image start');
    const data = await MongoDatasetImageSchema.find(
      {
        'metadata.expiredTime': { $lt: new Date() }
      },
      '_id'
    ).lean();
    for (const item of data) {
      try {
        await gridBucket.delete(new Types.ObjectId(item._id));
      } catch (error) {
        addLog.error('Delete expired dataset image error', error);
      }
    }
    addLog.debug('Clear expired dataset image end');
  };
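  // Sweep every 10 minutes; the 9-minute timer lock prevents overlapping runs across instances.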
  setCron('*/10 * * * *', async () => {
    if (
      await checkTimerLock({
        timerId: TimerIdEnum.clearExpiredDatasetImage,
        lockMinuted: 9
      })
    ) {
      try {
        await clearExpiredDatasetImages();
      } catch (error) {
        addLog.error('clearExpiredDatasetImageCron error', error);
      }
    }
  });
};