import { Types, connectionMongo, ReadPreference } from '../../mongo';
import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
import fsp from 'fs/promises';
import fs from 'fs';
import { DatasetFileSchema } from '@fastgpt/global/core/dataset/type';
import { MongoFileSchema } from './schema';
import { detectFileEncoding } from '@fastgpt/global/common/file/tools';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
import { MongoRawTextBuffer } from '../../buffer/rawText/schema';
import { readRawContentByFileBuffer } from '../read/utils';
import { gridFsStream2Buffer, stream2Encoding } from './utils';
import { addLog } from '../../system/log';

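/**
 * Get the raw `<bucket>.files` metadata collection for a GridFS bucket.
 */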
export function getGFSCollection(bucket: `${BucketNameEnum}`) {
  // Reference the schema so its model registration side effect runs before the raw collection is used
  MongoFileSchema;
  return connectionMongo.connection.db.collection(`${bucket}.files`);
}

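/**
 * Open a GridFSBucket for the given bucket name, preferring reads from secondary nodes.
 */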
export function getGridBucket(bucket: `${BucketNameEnum}`) {
  return new connectionMongo.mongo.GridFSBucket(connectionMongo.connection.db, {
    bucketName: bucket,
    // @ts-ignore
    readPreference: ReadPreference.SECONDARY_PREFERRED // Read from secondary node
  });
}

/* crud file */

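/**
 * Stream a local file into GridFS.
 * Stores teamId, tmbId and the detected encoding in the file metadata and
 * resolves with the new GridFS file id as a string.
 */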
export async function uploadFile({
  bucketName,
  teamId,
  tmbId,
  path,
  filename,
  contentType,
  metadata = {}
}: {
  bucketName: `${BucketNameEnum}`;
  teamId: string;
  tmbId: string;
  path: string;
  filename: string;
  contentType?: string;
  metadata?: Record<string, any>;
}) {
  if (!path) return Promise.reject(`filePath is empty`);
  if (!filename) return Promise.reject(`filename is empty`);

  const stats = await fsp.stat(path);
  if (!stats.isFile()) return Promise.reject(`${path} is not a file`);

  // Detect the file encoding from the read stream
  const { stream: readStream, encoding } = await stream2Encoding(fs.createReadStream(path));

  // Attach ownership and encoding info to the GridFS metadata
  metadata.teamId = teamId;
  metadata.tmbId = tmbId;
  metadata.encoding = encoding;

  // create a gridfs bucket
  const bucket = getGridBucket(bucketName);

  const stream = bucket.openUploadStream(filename, {
    metadata,
    contentType
  });

  // save to gridfs
  await new Promise((resolve, reject) => {
    readStream
      .pipe(stream as any)
      .on('finish', resolve)
      .on('error', reject);
  });

  return String(stream.id);
}

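/**
 * Look up a file's metadata document in `<bucket>.files`.
 * Resolves with undefined when the file does not exist.
 */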
export async function getFileById({
  bucketName,
  fileId
}: {
  bucketName: `${BucketNameEnum}`;
  fileId: string;
}) {
  const db = getGFSCollection(bucketName);
  const file = await db.findOne<DatasetFileSchema>({
    _id: new Types.ObjectId(fileId)
  });

  // if (!file) {
  //   return Promise.reject('File not found');
  // }

  return file || undefined;
}

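/**
 * Delete a batch of GridFS files in parallel.
 * On failure the whole batch is retried up to `retry` times.
 */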
export async function delFileByFileIdList({
  bucketName,
  fileIdList,
  retry = 3
}: {
  bucketName: `${BucketNameEnum}`;
  fileIdList: string[];
  retry?: number;
}): Promise<any> {
  try {
    const bucket = getGridBucket(bucketName);

    await Promise.all(fileIdList.map((id) => bucket.delete(new Types.ObjectId(id))));
  } catch (error) {
    // Retry the whole batch; the error is swallowed once retries are exhausted
    if (retry > 0) {
      return delFileByFileIdList({ bucketName, fileIdList, retry: retry - 1 });
    }
  }
}

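/**
 * Open a GridFS download stream for the given file id.
 */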
export async function getDownloadStream({
  bucketName,
  fileId
}: {
  bucketName: `${BucketNameEnum}`;
  fileId: string;
}) {
  const bucket = getGridBucket(bucketName);

  return bucket.openDownloadStream(new Types.ObjectId(fileId));
}

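/**
 * Read a file's raw text content.
 * Uses the MongoRawTextBuffer cache when available; otherwise downloads the file
 * from GridFS, parses it by extension and caches the result for smaller files.
 */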
export const readFileContentFromMongo = async ({
  teamId,
  bucketName,
  fileId,
  isQAImport = false
}: {
  teamId: string;
  bucketName: `${BucketNameEnum}`;
  fileId: string;
  isQAImport?: boolean;
}): Promise<{
  rawText: string;
  filename: string;
}> => {
  // Return the cached raw text if this file was parsed before
  const fileBuffer = await MongoRawTextBuffer.findOne({ sourceId: fileId }).lean();
  if (fileBuffer) {
    return {
      rawText: fileBuffer.rawText,
      filename: fileBuffer.metadata?.filename || ''
    };
  }

  const [file, fileStream] = await Promise.all([
    getFileById({ bucketName, fileId }),
    getDownloadStream({ bucketName, fileId })
  ]);
  // console.log('get file stream', Date.now() - start);
  if (!file) {
    return Promise.reject(CommonErrEnum.fileNotFound);
  }

  const extension = file?.filename?.split('.')?.pop()?.toLowerCase() || '';

  const start = Date.now();
  const fileBuffers = await gridFsStream2Buffer(fileStream);
  addLog.debug('get file buffer', { time: Date.now() - start });

  const encoding = file?.metadata?.encoding || detectFileEncoding(fileBuffers);

  const { rawText } = await readRawContentByFileBuffer({
    extension,
    isQAImport,
    teamId,
    buffer: fileBuffers,
    encoding,
    metadata: {
      relatedId: fileId
    }
  });

  // Cache the parsed text for files under 14MB (fire-and-forget, not awaited)
  if (fileBuffers.length < 14 * 1024 * 1024 && rawText.trim()) {
    MongoRawTextBuffer.create({
      sourceId: fileId,
      rawText,
      metadata: {
        filename: file.filename
      }
    });
  }

  return {
    rawText,
    filename: file.filename
  };
};
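
/*
Example usage (illustrative sketch, not part of the module): assumes a 'dataset'
bucket value exists on BucketNameEnum and that teamId / tmbId are valid ObjectId
strings for the calling team.

const fileId = await uploadFile({
  bucketName: 'dataset',
  teamId,
  tmbId,
  path: '/tmp/demo.txt',
  filename: 'demo.txt',
  contentType: 'text/plain'
});

const { rawText, filename } = await readFileContentFromMongo({
  teamId,
  bucketName: 'dataset',
  fileId
});
*/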