Files
FastGPT/packages/service/support/permission/dataset/auth.ts
Archer c30f069f2f V4.9.11 feature (#4969)
* Feat: Images dataset collection (#4941)

* New pic (#4858)

* Update dataset-related types: add image file ID and preview URL support; improve the dataset import flow with a new image dataset processing component; fix some i18n strings; update the file upload logic to support the new feature.

* Diff against the original code

* Add V4.9.10 release notes: support the PG `systemEnv.hnswMaxScanTuples` setting, optimize LLM stream call timeouts, and fix full-text search ordering across multiple datasets. Also update dataset indexes, removing the datasetId field to simplify queries.

* Switch to the fileId_image logic and add training queue matching logic

* Add image-collection detection and streamline preview URL generation so preview URLs are only generated when the dataset is an image collection; add related logging for debugging.

* Refactor Docker Compose configuration to comment out exposed ports for production environments, update image versions for pgvector, fastgpt, and mcp_server, and enhance Redis service with a health check. Additionally, standardize dataset collection labels in constants and improve internationalization strings across multiple languages.

* Enhance TrainingStates component by adding internationalization support for the imageParse training mode and update defaultCounts to include imageParse mode in trainingDetail API.

* Enhance dataset import context by adding additional steps for image dataset import process and improve internationalization strings for modal buttons in the useEditTitle hook.

* Update DatasetImportContext to conditionally render MyStep component based on data source type, improving the import process for non-image datasets.

* Refactor image dataset handling by improving internationalization strings, enhancing error messages, and streamlining the preview URL generation process.

* Upload images to the new dataset_collection_images table and update the logic to match

* Fix issues in all parts except the controller

* Integrate the image dataset logic into the controller

* Add missing i18n strings

* Add missing i18n strings

* Resolve review comments: mainly upload logic changes and component reuse

* Show an icon next to image names

* Fix naming issues that caused build errors

* Remove the unneeded collectionId parts

* Clean up redundant files and adjust a delete button

* Resolve everything except loading and the unified imageId

* Fix icon errors

* Reuse MyPhotoView and rename imageFileId to imageId via a global replace

* Revert unnecessary file changes

* Fix errors and adjust fields

* Delete temporary files after a successful upload and revert some changes

* Remove the path field, store images in GridFS, and update the create/delete code accordingly

* Fix build errors

---------

Co-authored-by: archer <545436317@qq.com>

* perf: image dataset

* feat: insert image

* perf: image icon

* fix: training state

---------

Co-authored-by: Zhuangzai fa <143257420+ctrlz526@users.noreply.github.com>

* fix: ts (#4948)

* Thirddatasetmd (#4942)

* add thirddataset.md

* fix thirddataset.md

* fix

* delete wrong png

---------

Co-authored-by: dreamer6680 <146868355@qq.com>

* perf: api dataset code

* perf: log

* add secondary.tsx (#4946)

* add secondary.tsx

* fix

---------

Co-authored-by: dreamer6680 <146868355@qq.com>

* perf: multiple menu

* perf: i18n

* feat: parse queue (#4960)

* feat: parse queue

* feat: sync parse queue

* fix thirddataset.md (#4962)

* fix thirddataset-4.png (#4963)

* feat: Dataset template import (#4934)

* Template import done except for the documentation

* Fix template import build errors

* Write the documentation

* compress pictures

* Change some constants to variables

---------

Co-authored-by: Archer <545436317@qq.com>

* perf: template import

* doc

* llm paragraph

* bocha tool

* fix: del collection

---------

Co-authored-by: Zhuangzai fa <143257420+ctrlz526@users.noreply.github.com>
Co-authored-by: dreamer6680 <1468683855@qq.com>
Co-authored-by: dreamer6680 <146868355@qq.com>
2025-06-06 14:48:44 +08:00


import { type PermissionValueType } from '@fastgpt/global/support/permission/type';
import { getResourcePermission, parseHeaderCert } from '../controller';
import {
type CollectionWithDatasetType,
type DatasetDataItemType,
type DatasetSchemaType
} from '@fastgpt/global/core/dataset/type';
import { getTmbInfoByTmbId } from '../../user/team/controller';
import { MongoDataset } from '../../../core/dataset/schema';
import { NullPermission, PerResourceTypeEnum } from '@fastgpt/global/support/permission/constant';
import { DatasetErrEnum } from '@fastgpt/global/common/error/code/dataset';
import { DatasetPermission } from '@fastgpt/global/support/permission/dataset/controller';
import { getCollectionWithDataset } from '../../../core/dataset/controller';
import { MongoDatasetData } from '../../../core/dataset/data/schema';
import { type AuthModeType, type AuthResponseType } from '../type';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { type ParentIdType } from '@fastgpt/global/common/parentFolder/type';
import { DatasetDefaultPermissionVal } from '@fastgpt/global/support/permission/dataset/constant';
import { getDatasetImagePreviewUrl } from '../../../core/dataset/image/utils';
export const authDatasetByTmbId = async ({
tmbId,
datasetId,
per,
isRoot = false
}: {
tmbId: string;
datasetId: string;
per: PermissionValueType;
isRoot?: boolean;
}): Promise<{
dataset: DatasetSchemaType & {
permission: DatasetPermission;
};
}> => {
const dataset = await (async () => {
const [{ teamId, permission: tmbPer }, dataset] = await Promise.all([
getTmbInfoByTmbId({ tmbId }),
MongoDataset.findOne({ _id: datasetId }).lean()
]);
if (!dataset) {
return Promise.reject(DatasetErrEnum.unExist);
}
if (isRoot) {
return {
...dataset,
permission: new DatasetPermission({
isOwner: true
})
};
}
if (String(dataset.teamId) !== teamId) {
return Promise.reject(DatasetErrEnum.unAuthDataset);
}
const isOwner = tmbPer.isOwner || String(dataset.tmbId) === String(tmbId);
// get dataset permission or inherit permission from parent folder.
const { Per } = await (async () => {
if (isOwner) {
return {
Per: new DatasetPermission({ isOwner: true })
};
}
if (
dataset.type === DatasetTypeEnum.folder ||
dataset.inheritPermission === false ||
!dataset.parentId
) {
// 1. The dataset is a folder (folders hold their full permission directly).
// 2. inheritPermission is false.
// 3. It is a root folder/dataset (no parentId).
const rp = await getResourcePermission({
teamId,
tmbId,
resourceId: datasetId,
resourceType: PerResourceTypeEnum.dataset
});
const Per = new DatasetPermission({
per: rp ?? DatasetDefaultPermissionVal,
isOwner
});
return {
Per
};
} else {
// Not a folder, inheritPermission is true, and not a root dataset: inherit the parent folder's permission.
const { dataset: parent } = await authDatasetByTmbId({
tmbId,
datasetId: dataset.parentId,
per,
isRoot
});
const Per = new DatasetPermission({
per: parent.permission.value,
isOwner
});
return {
Per
};
}
})();
if (!Per.checkPer(per)) {
return Promise.reject(DatasetErrEnum.unAuthDataset);
}
return {
...dataset,
permission: Per
};
})();
return { dataset };
};
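/*
  Usage sketch (hypothetical IDs; ReadPermissionVal is assumed to be the read bit
  exported from '@fastgpt/global/support/permission/constant'):

  const { dataset } = await authDatasetByTmbId({
    tmbId: '64f0...',      // team member id
    datasetId: '65a0...',  // a non-folder dataset with inheritPermission=true
    per: ReadPermissionVal
  });
  // dataset.permission.value is resolved from the parent folder, and the
  // checkPer(per) gate above has already rejected callers without read access.
*/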
export const authDataset = async ({
datasetId,
per,
...props
}: AuthModeType & {
datasetId: ParentIdType;
per: PermissionValueType;
}): Promise<
AuthResponseType & {
dataset: DatasetSchemaType & {
permission: DatasetPermission;
};
}
> => {
const result = await parseHeaderCert(props);
const { tmbId } = result;
if (!datasetId) {
return Promise.reject(DatasetErrEnum.unExist);
}
const { dataset } = await authDatasetByTmbId({
tmbId,
datasetId,
per,
isRoot: result.isRoot
});
return {
...result,
permission: dataset.permission,
dataset
};
};
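/*
  A minimal caller sketch, assuming a Next.js-style API handler and the
  WritePermissionVal constant; the exact request type is not part of this file:

  async function handler(req: any) {
    const { dataset, teamId, tmbId } = await authDataset({
      req,
      authToken: true,                // accept cookie/token credentials
      datasetId: req.query.datasetId,
      per: WritePermissionVal         // require write access
    });
    // ...perform writes scoped to teamId / tmbId
  }
*/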
// The temporary solution for authDatasetCollection: auth the parent dataset and reuse its permission as the collection's permission.
export async function authDatasetCollection({
collectionId,
per = NullPermission,
isRoot = false,
...props
}: AuthModeType & {
collectionId: string;
isRoot?: boolean;
}): Promise<
AuthResponseType<DatasetPermission> & {
collection: CollectionWithDatasetType;
}
> {
const { teamId, tmbId, userId, isRoot: isRootFromHeader } = await parseHeaderCert(props);
const collection = await getCollectionWithDataset(collectionId);
if (!collection) {
return Promise.reject(DatasetErrEnum.unExist);
}
const { dataset } = await authDatasetByTmbId({
tmbId,
datasetId: collection.datasetId,
per,
isRoot: isRootFromHeader
});
return {
userId,
teamId,
tmbId,
collection,
permission: dataset.permission,
isRoot: isRootFromHeader
};
}
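/*
  Sketch: collection auth is delegated to the parent dataset, so the returned
  permission is the dataset's permission reused for the collection
  (hypothetical ID; per defaults to NullPermission when omitted):

  const { collection, permission } = await authDatasetCollection({
    req,
    authToken: true,
    collectionId: '66b0...',
    per: ReadPermissionVal
  });
  // permission is authDatasetByTmbId's result for collection.datasetId.
*/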
// export async function authDatasetFile({
// fileId,
// per,
// ...props
// }: AuthModeType & {
// fileId: string;
// }): Promise<
// AuthResponseType<DatasetPermission> & {
// file: DatasetFileSchema;
// }
// > {
// const { teamId, tmbId, isRoot } = await parseHeaderCert(props);
// const [file, collection] = await Promise.all([
// getFileById({ bucketName: BucketNameEnum.dataset, fileId }),
// MongoDatasetCollection.findOne({
// teamId,
// fileId
// })
// ]);
// if (!file) {
// return Promise.reject(CommonErrEnum.fileNotFound);
// }
// if (!collection) {
// return Promise.reject(DatasetErrEnum.unAuthDatasetFile);
// }
// try {
// const { permission } = await authDatasetCollection({
// ...props,
// collectionId: collection._id,
// per,
// isRoot
// });
// return {
// teamId,
// tmbId,
// file,
// permission,
// isRoot
// };
// } catch (error) {
// return Promise.reject(DatasetErrEnum.unAuthDatasetFile);
// }
// }
/*
DatasetData permission is inherited from collection.
*/
export async function authDatasetData({
dataId,
...props
}: AuthModeType & {
dataId: string;
}) {
// get mongo dataset.data
const datasetData = await MongoDatasetData.findById(dataId);
if (!datasetData) {
return Promise.reject('core.dataset.error.Data not found');
}
const result = await authDatasetCollection({
...props,
collectionId: datasetData.collectionId
});
const data: DatasetDataItemType = {
id: String(datasetData._id),
teamId: datasetData.teamId,
updateTime: datasetData.updateTime,
q: datasetData.q,
a: datasetData.a,
imageId: datasetData.imageId,
imagePreivewUrl: datasetData.imageId
? getDatasetImagePreviewUrl({
imageId: datasetData.imageId,
teamId: datasetData.teamId,
datasetId: datasetData.datasetId,
expiredMinutes: 30
})
: undefined,
chunkIndex: datasetData.chunkIndex,
indexes: datasetData.indexes,
datasetId: String(datasetData.datasetId),
collectionId: String(datasetData.collectionId),
sourceName: result.collection.name || '',
sourceId: result.collection?.fileId || result.collection?.rawLink,
isOwner: String(datasetData.tmbId) === String(result.tmbId)
// permission: result.permission
};
return {
...result,
datasetData: data,
collection: result.collection
};
}
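/*
  Sketch (hypothetical ID): fetching a single data item with its auth context.
  imagePreivewUrl is only populated when the row has an imageId, and the signed
  preview URL built above expires after 30 minutes:

  const { datasetData, permission } = await authDatasetData({
    req,
    authToken: true,
    dataId: '66c0...',
    per: ReadPermissionVal
  });
*/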