dataset inheritance permission (#2151)

* refactor: dataset create and update api

* chore: defaultPermission & resume inherit permission (frontend)

* refactor: dataset auth

* fix(ts): add inheritPermission to default data types

* chore: adjust the code

* fix: list api type filter

* fix: query condition
Finley Ge · 2024-07-25 19:03:24 +08:00 · committed by GitHub
parent 5906daff9f · commit 65515e7952
20 changed files with 481 additions and 199 deletions

View File

@@ -8,8 +8,8 @@ export enum DatasetErrEnum {
unAuthDatasetCollection = 'unAuthDatasetCollection',
unAuthDatasetData = 'unAuthDatasetData',
unAuthDatasetFile = 'unAuthDatasetFile',
unLinkCollection = 'unLinkCollection'
unLinkCollection = 'unLinkCollection',
invalidVectorModelOrQAModel = 'invalidVectorModelOrQAModel'
}
const datasetErr = [
{
@@ -39,6 +39,10 @@ const datasetErr = [
{
statusText: DatasetErrEnum.unLinkCollection,
message: 'core.dataset.error.unLinkCollection'
},
{
statusText: DatasetErrEnum.invalidVectorModelOrQAModel,
message: 'core.dataset.error.invalidVectorModelOrQAModel'
}
];
export default datasetErr.reduce((acc, cur, index) => {
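The reduce at the end of this file is truncated above; it folds the error list into a lookup map. A minimal sketch of that pattern, reusing datasetErr and DatasetErrEnum from this file (the numeric code base below is a placeholder, not taken from this commit):

type DatasetErrItem = { statusText: DatasetErrEnum; message: string };

// Sketch only: fold the list into a map keyed by statusText, giving each
// entry a stable numeric code. The 501000 base is a placeholder value.
const datasetErrMap = datasetErr.reduce<Record<string, DatasetErrItem & { code: number }>>(
  (acc, cur, index) => {
    acc[cur.statusText] = { ...cur, code: 501000 + index };
    return acc;
  },
  {}
);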

View File

@@ -10,7 +10,6 @@ export type DatasetUpdateBody = {
name?: string;
avatar?: string;
intro?: string;
permission?: DatasetSchemaType['permission']; // TODO: Should be deleted.
agentModel?: LLMModelItemType;
status?: DatasetSchemaType['status'];

View File

@@ -1,4 +1,4 @@
import { PermissionValueType } from 'support/permission/type';
import { PermissionSchemaType } from '../../support/permission/type';
import type { LLMModelItemType, VectorModelItemType } from '../../core/ai/model.d';
import { PermissionTypeEnum } from '../../support/permission/constant';
import { PushDatasetDataChunkProps } from './api';
@@ -12,31 +12,28 @@ import {
import { DatasetPermission } from '../../support/permission/dataset/controller';
import { Permission } from '../../support/permission/controller';
/* schema */
export type DatasetSchemaType = {
_id: string;
parentId: string;
parentId?: string;
userId: string;
teamId: string;
tmbId: string;
updateTime: Date;
avatar: string;
name: string;
vectorModel: string;
agentModel: string;
intro: string;
type: DatasetTypeEnum;
type: `${DatasetTypeEnum}`;
status: `${DatasetStatusEnum}`;
// permission: DatasetPermission;
// metadata
websiteConfig?: {
url: string;
selector: string;
};
externalReadUrl?: string;
defaultPermission: PermissionValueType;
};
} & PermissionSchemaType;
// } & PermissionSchemaType;
export type DatasetCollectionSchemaType = {
_id: string;
@@ -133,15 +130,13 @@ export type DatasetSimpleItemType = {
};
export type DatasetListItemType = {
_id: string;
parentId: string;
avatar: string;
name: string;
intro: string;
type: DatasetTypeEnum;
type: `${DatasetTypeEnum}`;
permission: DatasetPermission;
vectorModel: VectorModelItemType;
defaultPermission: PermissionValueType;
};
} & PermissionSchemaType;
export type DatasetItemType = Omit<DatasetSchemaType, 'vectorModel' | 'agentModel'> & {
vectorModel: VectorModelItemType;
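DatasetSchemaType and DatasetListItemType now intersect with PermissionSchemaType instead of declaring defaultPermission directly. The shape of PermissionSchemaType is not part of this diff; judging from the fields the rest of the commit reads (defaultPermission and inheritPermission), a plausible sketch is:

import { PermissionValueType } from '../../support/permission/type';

// Assumed shape only; the real type lives in support/permission/type and may
// carry more fields than the two this commit relies on.
type PermissionSchemaType = {
  defaultPermission: PermissionValueType;
  inheritPermission: boolean;
};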

View File

@@ -1,6 +1,4 @@
import { connectionMongo, getMongoModel, type Model } from '../../common/mongo';
const { Schema, model, models } = connectionMongo;
import { DatasetSchemaType } from '@fastgpt/global/core/dataset/type.d';
import { getMongoModel, Schema } from '../../common/mongo';
import {
DatasetStatusEnum,
DatasetStatusMap,
@@ -12,6 +10,8 @@ import {
TeamMemberCollectionName
} from '@fastgpt/global/support/user/team/constant';
import { DatasetDefaultPermissionVal } from '@fastgpt/global/support/permission/dataset/constant';
import { getPermissionSchema } from '@fastgpt/global/support/permission/utils';
import type { DatasetSchemaType } from '@fastgpt/global/core/dataset/type.d';
export const DatasetCollectionName = 'datasets';
@@ -85,11 +85,10 @@ const DatasetSchema = new Schema({
}
}
},
externalReadUrl: String,
defaultPermission: {
type: Number,
default: DatasetDefaultPermissionVal
}
externalReadUrl: {
type: String
},
...getPermissionSchema(DatasetDefaultPermissionVal)
});
try {
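The hand-written defaultPermission field is replaced by spreading getPermissionSchema(DatasetDefaultPermissionVal) into the schema. The helper's return value is not shown in this commit; a sketch of what it plausibly expands to, mirroring the fields the PR uses, would be:

// Hypothetical expansion of getPermissionSchema(defaultPer); not the actual source.
const permissionSchemaFields = (defaultPer: number) => ({
  defaultPermission: {
    type: Number,
    default: defaultPer
  },
  inheritPermission: {
    type: Boolean,
    default: true
  }
});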

View File

@@ -17,10 +17,11 @@ import { getFileById } from '../../../common/file/gridfs/controller';
import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
import { MongoDatasetData } from '../../../core/dataset/data/schema';
import { DatasetDefaultPermissionVal } from '@fastgpt/global/support/permission/dataset/constant';
import { AuthModeType, AuthResponseType } from '../type';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { ParentIdType } from '@fastgpt/global/common/parentFolder/type';
export async function authDatasetByTmbId({
export const authDatasetByTmbId = async ({
tmbId,
datasetId,
per
@@ -28,30 +29,64 @@ export async function authDatasetByTmbId({
tmbId: string;
datasetId: string;
per: PermissionValueType;
}) {
const { teamId, permission: tmbPer } = await getTmbInfoByTmbId({ tmbId });
}): Promise<{
dataset: DatasetSchemaType & {
permission: DatasetPermission;
};
}> => {
const dataset = await (async () => {
// get app and per
const [dataset, rp] = await Promise.all([
MongoDataset.findOne({ _id: datasetId, teamId }).lean(),
getResourcePermission({
teamId,
tmbId,
resourceId: datasetId,
resourceType: PerResourceTypeEnum.dataset
}) // this could be null
const [{ teamId, permission: tmbPer }, dataset] = await Promise.all([
getTmbInfoByTmbId({ tmbId }),
MongoDataset.findOne({ _id: datasetId }).lean()
]);
if (!dataset) {
return Promise.reject(DatasetErrEnum.unExist);
}
const isOwner = tmbPer.isOwner || String(dataset.tmbId) === String(tmbId);
// get dataset permission or inherit permission from parent folder.
const { Per, defaultPermission } = await (async () => {
if (
dataset.type === DatasetTypeEnum.folder ||
dataset.inheritPermission === false ||
!dataset.parentId
) {
// 1. is a folder. (Folders carry their own complete permissions.)
// 2. inheritPermission is false.
// 3. is root folder/dataset.
const rp = await getResourcePermission({
teamId,
tmbId,
resourceId: datasetId,
resourceType: PerResourceTypeEnum.dataset
});
const Per = new DatasetPermission({
per: rp?.permission ?? dataset.defaultPermission,
isOwner
});
return {
Per,
defaultPermission: dataset.defaultPermission
};
} else {
// Not a folder, inheritPermission is true, and not a root dataset: inherit from the parent.
const { dataset: parent } = await authDatasetByTmbId({
tmbId,
datasetId: dataset.parentId,
per
});
const Per = new DatasetPermission({
per: parent.permission.value,
isOwner
});
return {
Per,
defaultPermission: parent.defaultPermission
};
}
})();
if (!Per.checkPer(per)) {
return Promise.reject(DatasetErrEnum.unAuthDataset);
@@ -59,27 +94,34 @@ export async function authDatasetByTmbId({
return {
...dataset,
defaultPermission: dataset.defaultPermission ?? DatasetDefaultPermissionVal,
defaultPermission,
permission: Per
};
})();
return { dataset };
}
};
// Auth Dataset
export async function authDataset({
export const authDataset = async ({
datasetId,
per = NullPermission,
per,
...props
}: AuthModeType & {
datasetId: string;
datasetId: ParentIdType;
per: PermissionValueType;
}): Promise<
AuthResponseType<DatasetPermission> & {
dataset: DatasetSchemaType;
AuthResponseType & {
dataset: DatasetSchemaType & {
permission: DatasetPermission;
};
}
> => {
const result = await parseHeaderCert(props);
const { tmbId } = result;
if (!datasetId) {
return Promise.reject(DatasetErrEnum.unExist);
}
> {
const { teamId, tmbId } = await parseHeaderCert(props);
const { dataset } = await authDatasetByTmbId({
tmbId,
@@ -88,13 +130,11 @@ export async function authDataset({
});
return {
teamId,
tmbId,
dataset,
permission: dataset.permission
...result,
permission: dataset.permission,
dataset
};
}
};
// the temporary solution for authDatasetCollection is getting the
export async function authDatasetCollection({
collectionId,
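The rewritten authDatasetByTmbId resolves permissions recursively: folders, root-level datasets, and datasets with inheritPermission disabled read their own resource permission (falling back to defaultPermission), while everything else defers to its parent folder. A minimal usage sketch for an API route (the handler shape and returned value are illustrative, not part of this diff):

import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import type { ApiRequestProps } from '@fastgpt/service/type/next';

// Rejects with DatasetErrEnum.unAuthDataset when the caller's (possibly
// inherited) permission does not cover WritePermissionVal.
async function handler(req: ApiRequestProps<{ datasetId: string }>) {
  const { dataset } = await authDataset({
    req,
    authToken: true,
    datasetId: req.body.datasetId,
    per: WritePermissionVal
  });

  // dataset.permission is a DatasetPermission built from either the dataset's
  // own collaborator record / defaultPermission or its parent folder's.
  return dataset.permission.value;
}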

View File

@@ -511,7 +511,8 @@
"unAuthDatasetData": "Unauthorized to operate this data",
"unAuthDatasetFile": "Unauthorized to operate this file",
"unCreateCollection": "Unauthorized to operate this data",
"unLinkCollection": "Not a network link collection"
"unLinkCollection": "Not a network link collection",
"invalidVectorModelOrQAModel": "Invalid vector model or QA model"
},
"externalFile": "external file repository",
"file": "File",

View File

@@ -511,7 +511,8 @@
"unAuthDatasetData": "无权操作该数据",
"unAuthDatasetFile": "无权操作该文件",
"unCreateCollection": "无权操作该数据",
"unLinkCollection": "不是网络链接集合"
"unLinkCollection": "不是网络链接集合",
"invalidVectorModelOrQAModel": "VectorModel 或 QA 模型错误"
},
"externalFile": "外部文件库",
"file": "文件",

View File

@@ -5,7 +5,7 @@ import React from 'react';
import { DatasetTypeMap } from '@fastgpt/global/core/dataset/constants';
import { useI18n } from '@/web/context/I18n';
const DatasetTypeTag = ({ type, ...props }: { type: DatasetTypeEnum } & FlexProps) => {
const DatasetTypeTag = ({ type, ...props }: { type: `${DatasetTypeEnum}` } & FlexProps) => {
const { datasetT } = useI18n();
const item = DatasetTypeMap[type] || DatasetTypeMap['dataset'];

View File

@@ -25,7 +25,6 @@ export type CreateDatasetParams = {
avatar: string;
vectorModel?: string;
agentModel?: string;
defaultPermission?: PermissionValueType;
};
export type RebuildEmbeddingProps = {

View File

@@ -1,23 +1,30 @@
import type { NextApiRequest } from 'next';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import type { CreateDatasetParams } from '@/global/core/dataset/api.d';
import { authUserPer } from '@fastgpt/service/support/permission/user/auth';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { getLLMModel, getVectorModel, getDatasetModel } from '@fastgpt/service/core/ai/model';
import { checkTeamDatasetLimit } from '@fastgpt/service/support/permission/teamLimit';
import { NullPermission, WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { NextAPI } from '@/service/middleware/entry';
import { DatasetErrEnum } from '@fastgpt/global/common/error/code/dataset';
import type { ApiRequestProps } from '@fastgpt/service/type/next';
import { parseParentIdInMongo } from '@fastgpt/global/common/parentFolder/utils';
async function handler(req: NextApiRequest) {
export type DatasetCreateQuery = {};
export type DatasetCreateBody = CreateDatasetParams;
export type DatasetCreateResponse = string;
async function handler(
req: ApiRequestProps<DatasetCreateBody, DatasetCreateQuery>
): Promise<DatasetCreateResponse> {
const {
parentId,
name,
type = DatasetTypeEnum.dataset,
avatar,
vectorModel = global.vectorModels[0].model,
agentModel = getDatasetModel().model,
defaultPermission = NullPermission
} = req.body as CreateDatasetParams;
agentModel = getDatasetModel().model
} = req.body;
// auth
const { teamId, tmbId } = await authUserPer({
@@ -31,25 +38,23 @@ async function handler(req: NextApiRequest) {
const vectorModelStore = getVectorModel(vectorModel);
const agentModelStore = getLLMModel(agentModel);
if (!vectorModelStore || !agentModelStore) {
throw new Error('vectorModel or qaModel is invalid'); // TODO: use enum code
return Promise.reject(DatasetErrEnum.invalidVectorModelOrQAModel);
}
// check limit
await checkTeamDatasetLimit(teamId);
const { _id } = await MongoDataset.create({
...parseParentIdInMongo(parentId),
name,
teamId,
tmbId,
vectorModel,
agentModel,
avatar,
parentId: parentId || null,
type,
defaultPermission
type
});
return _id;
}
export default NextAPI(handler);
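With the handler typed through ApiRequestProps, the client sends a plain CreateDatasetParams body and defaultPermission is no longer accepted. A sketch of the corresponding call through postCreateDataset from the web layer (the field set mirrors what the handler destructures; values are illustrative, and vectorModel/agentModel are omitted so the handler falls back to its defaults):

import { postCreateDataset } from '@/web/core/dataset/api';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';

async function createDataset(parentId?: string) {
  // The handler returns the new dataset's _id as a string.
  const datasetId = await postCreateDataset({
    parentId,
    name: 'Product FAQ', // illustrative
    type: DatasetTypeEnum.dataset,
    avatar: '' // illustrative; the UI normally supplies an uploaded icon path
  });
  return datasetId;
}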

View File

@@ -1,4 +1,3 @@
import type { NextApiRequest } from 'next';
import type { DatasetListItemType } from '@fastgpt/global/core/dataset/type.d';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
@@ -14,29 +13,70 @@ import { MongoResourcePermission } from '@fastgpt/service/support/permission/sch
import { DatasetDefaultPermissionVal } from '@fastgpt/global/support/permission/dataset/constant';
import { ParentIdType } from '@fastgpt/global/common/parentFolder/type';
import { parseParentIdInMongo } from '@fastgpt/global/common/parentFolder/utils';
import { ApiRequestProps } from '@fastgpt/service/type/next';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { replaceRegChars } from '@fastgpt/global/common/string/tools';
export type GetDatasetListBody = { parentId: ParentIdType; type?: DatasetTypeEnum };
export type GetDatasetListBody = {
parentId: ParentIdType;
type?: DatasetTypeEnum;
searchKey?: string;
};
async function handler(req: NextApiRequest) {
const { parentId, type } = req.body as GetDatasetListBody;
async function handler(req: ApiRequestProps<GetDatasetListBody>) {
const { parentId, type, searchKey } = req.body;
// Credential verification
const {
dataset: parentDataset,
teamId,
tmbId,
permission: tmbPer
} = await authUserPer({
} = await (async () => {
if (parentId) {
return await authDataset({
req,
authToken: true,
per: ReadPermissionVal,
datasetId: parentId
});
}
return {
...(await authUserPer({
req,
authToken: true,
authApiKey: true,
per: ReadPermissionVal
});
})),
dataset: undefined
};
})();
const findDatasetQuery = (() => {
const searchMatch = searchKey
? {
$or: [
{ name: { $regex: new RegExp(`${replaceRegChars(searchKey)}`, 'i') } },
{ intro: { $regex: new RegExp(`${replaceRegChars(searchKey)}`, 'i') } }
]
}
: {};
if (searchKey) {
return {
teamId,
...searchMatch
};
}
return {
teamId,
...(type ? (Array.isArray(type) ? { type: { $in: type } } : { type }) : {}),
...parseParentIdInMongo(parentId)
};
})();
const [myDatasets, rpList] = await Promise.all([
MongoDataset.find({
teamId,
...parseParentIdInMongo(parentId),
...(type && { type })
})
MongoDataset.find(findDatasetQuery)
.sort({
updateTime: -1
})
@@ -50,14 +90,26 @@ async function handler(req: NextApiRequest) {
const filterDatasets = myDatasets
.map((dataset) => {
const Per = (() => {
if (dataset.inheritPermission && parentDataset && dataset.type !== DatasetTypeEnum.folder) {
dataset.defaultPermission = parentDataset.defaultPermission;
const perVal = rpList.find(
(item) => String(item.resourceId) === String(parentDataset._id)
)?.permission;
return new DatasetPermission({
per: perVal ?? parentDataset.defaultPermission,
isOwner: String(parentDataset.tmbId) === tmbId || tmbPer.isOwner
});
} else {
const perVal = rpList.find(
(item) => String(item.resourceId) === String(dataset._id)
)?.permission;
const Per = new DatasetPermission({
return new DatasetPermission({
per: perVal ?? dataset.defaultPermission,
isOwner: String(dataset.tmbId) === tmbId || tmbPer.isOwner
});
}
})();
return {
...dataset,
permission: Per
@@ -68,14 +120,14 @@ async function handler(req: NextApiRequest) {
const data = await Promise.all(
filterDatasets.map<DatasetListItemType>((item) => ({
_id: item._id,
parentId: item.parentId,
avatar: item.avatar,
name: item.name,
intro: item.intro,
type: item.type,
permission: item.permission,
vectorModel: getVectorModel(item.vectorModel),
defaultPermission: item.defaultPermission ?? DatasetDefaultPermissionVal
defaultPermission: item.defaultPermission ?? DatasetDefaultPermissionVal,
inheritPermission: item.inheritPermission
}))
);
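Note the search behavior: when searchKey is set, the query deliberately drops the parentId and type filters and matches name or intro across the whole team. An illustrative call through the existing getDatasets helper:

import { getDatasets } from '@/web/core/dataset/api';

async function searchDatasets(searchKey: string) {
  // The handler ignores parentId whenever searchKey is present, so this
  // matches name/intro across the whole team, not just the current folder.
  return getDatasets({ parentId: null, searchKey });
}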

View File

@@ -0,0 +1,45 @@
import type { ApiRequestProps } from '@fastgpt/service/type/next';
import { NextAPI } from '@/service/middleware/entry';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import {
ManagePermissionVal,
PerResourceTypeEnum
} from '@fastgpt/global/support/permission/constant';
import { resumeInheritPermission } from '@fastgpt/service/support/permission/inheritPermission';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
export type ResumeInheritPermissionQuery = {
datasetId: string;
};
export type ResumeInheritPermissionBody = {};
// resume the dataset's inherit permission.
async function handler(
req: ApiRequestProps<ResumeInheritPermissionBody, ResumeInheritPermissionQuery>
) {
const { datasetId } = req.query;
const { dataset } = await authDataset({
datasetId,
req,
authToken: true,
per: ManagePermissionVal
});
if (dataset.parentId) {
await resumeInheritPermission({
resource: dataset,
folderTypeList: [DatasetTypeEnum.folder],
resourceType: PerResourceTypeEnum.dataset,
resourceModel: MongoDataset
});
} else {
await MongoDataset.updateOne(
{
_id: datasetId
},
{
inheritPermission: true
}
);
}
}
export default NextAPI(handler);
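The matching web helper, resumeInheritPer, is added later in this commit (web/core/dataset/api). A minimal usage sketch from the frontend:

import { resumeInheritPer } from '@/web/core/dataset/api';

// datasetId is the _id of the dataset/folder whose inheritance should be restored;
// the endpoint re-syncs collaborators and defaultPermission from the parent when one exists.
async function restoreInheritance(datasetId: string) {
  await resumeInheritPer(datasetId);
}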

View File

@@ -1,15 +1,34 @@
import type { NextApiRequest } from 'next';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import type { DatasetUpdateBody } from '@fastgpt/global/core/dataset/api.d';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { NextAPI } from '@/service/middleware/entry';
import {
OwnerPermissionVal,
ManagePermissionVal,
PerResourceTypeEnum,
WritePermissionVal
} from '@fastgpt/global/support/permission/constant';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { ClientSession } from 'mongoose';
import { PermissionValueType } from '@fastgpt/global/support/permission/type';
import { parseParentIdInMongo } from '@fastgpt/global/common/parentFolder/utils';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
import { DatasetDefaultPermissionVal } from '@fastgpt/global/support/permission/dataset/constant';
import { DatasetSchemaType } from '@fastgpt/global/core/dataset/type';
import { getResourceAllClbs } from '@fastgpt/service/support/permission/controller';
import {
syncChildrenPermission,
syncCollaborators
} from '@fastgpt/service/support/permission/inheritPermission';
async function handler(req: NextApiRequest) {
export type DatasetUpdateQuery = {};
export type DatasetUpdateResponse = any;
async function handler(
req: ApiRequestProps<DatasetUpdateBody, DatasetUpdateQuery>,
_res: ApiResponseType<any>
): Promise<DatasetUpdateResponse> {
const {
id,
parentId,
@@ -21,34 +40,131 @@ async function handler(req: NextApiRequest) {
externalReadUrl,
defaultPermission,
status
} = req.body as DatasetUpdateBody;
} = req.body;
if (!id) {
return Promise.reject(CommonErrEnum.missingParams);
}
if (defaultPermission) {
await authDataset({ req, authToken: true, datasetId: id, per: OwnerPermissionVal });
const { dataset } = (await (async () => {
if (defaultPermission !== undefined) {
return await authDataset({ req, authToken: true, datasetId: id, per: ManagePermissionVal });
} else {
await authDataset({ req, authToken: true, datasetId: id, per: WritePermissionVal });
return await authDataset({ req, authToken: true, datasetId: id, per: WritePermissionVal });
}
})()) as { dataset: DatasetSchemaType };
await MongoDataset.findOneAndUpdate(
const isDefaultPermissionChanged =
defaultPermission !== undefined && dataset.defaultPermission !== defaultPermission;
const isFolder = dataset.type === DatasetTypeEnum.folder;
const onUpdate = async (
session?: ClientSession,
updatedDefaultPermission?: PermissionValueType
) => {
await MongoDataset.findByIdAndUpdate(
id,
{
_id: id
},
{
...(parentId !== undefined && { parentId: parentId || null }),
...parseParentIdInMongo(parentId),
...(name && { name }),
...(avatar && { avatar }),
...(agentModel && { agentModel: agentModel.model }),
...(websiteConfig && { websiteConfig }),
...(status && { status }),
...(intro && { intro }),
...(intro !== undefined && { intro }),
...(externalReadUrl && { externalReadUrl }),
...(defaultPermission !== undefined && { defaultPermission })
}
// defaultPermission resolved by the calling branch (inherited on move, or explicitly set)
...(updatedDefaultPermission !== undefined && {
defaultPermission: updatedDefaultPermission
}),
// an explicit defaultPermission change turns inheritance off
...(isDefaultPermissionChanged && { inheritPermission: false })
},
{ session }
);
}
};
// move
if (parentId !== undefined) {
await mongoSessionRun(async (session) => {
const parentDefaultPermission = await (async () => {
if (parentId) {
const { dataset: parentDataset } = await authDataset({
req,
authToken: true,
datasetId: parentId,
per: WritePermissionVal
});
return parentDataset.defaultPermission;
}
return DatasetDefaultPermissionVal;
})();
if (isFolder && dataset.inheritPermission) {
const parentClbs = await getResourceAllClbs({
teamId: dataset.teamId,
resourceId: parentId,
resourceType: PerResourceTypeEnum.dataset,
session
});
await syncCollaborators({
teamId: dataset.teamId,
resourceId: id,
resourceType: PerResourceTypeEnum.dataset,
collaborators: parentClbs,
session
});
await syncChildrenPermission({
resource: dataset,
resourceType: PerResourceTypeEnum.dataset,
resourceModel: MongoDataset,
folderTypeList: [DatasetTypeEnum.folder],
collaborators: parentClbs,
defaultPermission: parentDefaultPermission,
session
});
return onUpdate(session, parentDefaultPermission);
}
return onUpdate(session);
});
} else if (isDefaultPermissionChanged) {
await mongoSessionRun(async (session) => {
if (isFolder) {
await syncChildrenPermission({
defaultPermission,
resource: {
_id: dataset._id,
type: dataset.type,
teamId: dataset.teamId,
parentId: dataset.parentId
},
resourceType: PerResourceTypeEnum.dataset,
resourceModel: MongoDataset,
folderTypeList: [DatasetTypeEnum.folder],
session
});
} else if (dataset.inheritPermission && dataset.parentId) {
const parentClbs = await getResourceAllClbs({
teamId: dataset.teamId,
resourceId: parentId,
resourceType: PerResourceTypeEnum.dataset,
session
});
await syncCollaborators({
teamId: dataset.teamId,
resourceId: id,
resourceType: PerResourceTypeEnum.dataset,
collaborators: parentClbs,
session
});
}
return onUpdate(session, defaultPermission);
});
} else {
return onUpdate();
}
}
export default NextAPI(handler);
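The update handler now branches three ways: a move (parentId provided) re-syncs collaborators and defaultPermission from the new parent for inheriting folders; an explicit defaultPermission change requires ManagePermissionVal, disables inheritance, and for folders pushes the value down to children; everything else is a plain field update. A sketch of the second case as called from the web layer (the permission value is illustrative):

import { putDatasetById } from '@/web/core/dataset/api';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';

// Requires ManagePermissionVal on the dataset; per the handler above this also
// sets inheritPermission to false and, for folders, syncs children in a session.
async function setDefaultPermission(datasetId: string) {
  await putDatasetById({
    id: datasetId,
    defaultPermission: ReadPermissionVal // illustrative PermissionValueType
  });
}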

View File

@@ -20,7 +20,6 @@ import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants'
import AIModelSelector from '@/components/Select/AIModelSelector';
import { useI18n } from '@/web/context/I18n';
import QuestionTip from '@fastgpt/web/components/common/MyTooltip/QuestionTip';
import { DatasetDefaultPermissionVal } from '@fastgpt/global/support/permission/dataset/constant';
import { useSystem } from '@fastgpt/web/hooks/useSystem';
const CreateModal = ({ onClose, parentId }: { onClose: () => void; parentId?: string }) => {
@@ -41,8 +40,7 @@ const CreateModal = ({ onClose, parentId }: { onClose: () => void; parentId?: st
name: '',
intro: '',
vectorModel: filterNotHiddenVectorModelList[0].model,
agentModel: datasetModelList[0].model,
defaultPermission: DatasetDefaultPermissionVal
agentModel: datasetModelList[0].model
}
});
const avatar = watch('avatar');

View File

@@ -1,5 +1,5 @@
import React, { useMemo, useRef, useState } from 'react';
import { putDatasetById } from '@/web/core/dataset/api';
import { resumeInheritPer } from '@/web/core/dataset/api';
import { useDatasetStore } from '@/web/core/dataset/store/dataset';
import { Box, Flex, Grid } from '@chakra-ui/react';
import { DatasetTypeEnum, DatasetTypeMap } from '@fastgpt/global/core/dataset/constants';
@@ -42,16 +42,16 @@ function List() {
const { t } = useTranslation();
const { commonT } = useI18n();
const {
refetchDatasets,
loadMyDatasets,
setMoveDatasetId,
refetchPaths,
refetchFolderDetail,
editedDataset,
setEditedDataset,
onDelDataset
onDelDataset,
onUpdateDataset,
myDatasets
} = useContextSelector(DatasetsContext, (v) => v);
const [editPerDatasetIndex, setEditPerDatasetIndex] = useState<number>();
const { myDatasets, loadMyDatasets } = useDatasetStore();
const [loadingDatasetId, setLoadingDatasetId] = useState<string>();
const { getBoxProps } = useFolderDrag({
@@ -61,11 +61,10 @@ function List() {
onDrop: async (dragId: string, targetId: string) => {
setLoadingDatasetId(dragId);
try {
await putDatasetById({
await onUpdateDataset({
id: dragId,
parentId: targetId
});
refetchDatasets();
} catch (error) {}
setLoadingDatasetId(undefined);
}
@@ -132,7 +131,7 @@ function List() {
() =>
onDelDataset(id).then(() => {
refetchPaths();
refetchDatasets();
loadMyDatasets();
}),
undefined,
DeleteTipsMap.current[DatasetTypeEnum.dataset]
@@ -350,15 +349,12 @@ function List() {
title={commonT('dataset.Edit Info')}
onClose={() => setEditedDataset(undefined)}
onEdit={async (data) => {
await putDatasetById({
await onUpdateDataset({
id: editedDataset.id,
name: data.name,
intro: data.intro,
avatar: data.avatar
});
loadMyDatasets(parentId ? parentId : undefined);
refetchFolderDetail();
refetchPaths();
setEditedDataset(undefined);
}}
/>
@@ -366,18 +362,22 @@ function List() {
{!!editPerDataset && (
<ConfigPerModal
hasParent={!!parentId}
refetchResource={loadMyDatasets}
isInheritPermission={editPerDataset.inheritPermission}
resumeInheritPermission={() =>
resumeInheritPer(editPerDataset._id).then(() => Promise.all([loadMyDatasets()]))
}
avatar={editPerDataset.avatar}
name={editPerDataset.name}
defaultPer={{
value: editPerDataset.defaultPermission,
defaultValue: DatasetDefaultPermissionVal,
onChange: async (e) => {
await putDatasetById({
onChange: (e) =>
onUpdateDataset({
id: editPerDataset._id,
defaultPermission: e
});
refetchDatasets();
}
})
}}
managePer={{
permission: editPerDataset.permission,
@@ -400,7 +400,8 @@ function List() {
deleteDatasetCollaborators({
datasetId: editPerDataset._id,
tmbId
})
}),
refreshDeps: [editPerDataset._id, editPerDataset.inheritPermission]
}}
onClose={() => setEditPerDatasetIndex(undefined)}
/>

View File

@@ -5,7 +5,6 @@ import {
getDatasetById,
delDatasetById
} from '@/web/core/dataset/api';
import { useDatasetStore } from '@/web/core/dataset/store/dataset';
import {
GetResourceFolderListProps,
ParentIdType,
@@ -19,16 +18,17 @@ import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import { DatasetUpdateBody } from '@fastgpt/global/core/dataset/api';
import dynamic from 'next/dynamic';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { DatasetItemType } from '@fastgpt/global/core/dataset/type';
import { DatasetItemType, DatasetListItemType } from '@fastgpt/global/core/dataset/type';
import { EditResourceInfoFormType } from '@/components/common/Modal/EditResourceModal';
import { useTranslation } from 'react-i18next';
const MoveModal = dynamic(() => import('@/components/common/folder/MoveModal'));
export type DatasetContextType = {
refetchDatasets: () => void;
myDatasets: DatasetListItemType[];
loadMyDatasets: () => Promise<DatasetListItemType[]>;
refetchPaths: () => void;
refetchFolderDetail: () => void;
refetchFolderDetail: () => Promise<DatasetItemType | undefined>;
isFetchingDatasets: boolean;
setMoveDatasetId: (id: string) => void;
paths: ParentTreePathItemType[];
@@ -36,28 +36,48 @@ export type DatasetContextType = {
editedDataset?: EditResourceInfoFormType;
setEditedDataset: (data?: EditResourceInfoFormType) => void;
onDelDataset: (id: string) => Promise<void>;
onUpdateDataset: (data: DatasetUpdateBody) => Promise<void>;
};
export const DatasetsContext = createContext<DatasetContextType>({
refetchDatasets: () => {},
isFetchingDatasets: false,
setMoveDatasetId: () => {},
refetchPaths: () => {},
paths: [],
refetchFolderDetail: () => {},
folderDetail: {} as any,
editedDataset: {} as any,
setEditedDataset: () => {},
onDelDataset: () => Promise.resolve()
onDelDataset: () => Promise.resolve(),
loadMyDatasets: function (): Promise<DatasetListItemType[]> {
throw new Error('Function not implemented.');
},
refetchFolderDetail: function (): Promise<DatasetItemType | undefined> {
throw new Error('Function not implemented.');
},
onUpdateDataset: function (_data: DatasetUpdateBody): Promise<void> {
throw new Error('Function not implemented.');
},
myDatasets: []
});
function DatasetContextProvider({ children }: { children: React.ReactNode }) {
const router = useRouter();
const { commonT } = useI18n();
const { t } = useTranslation();
const [moveDatasetId, setMoveDatasetId] = useState<string>();
const { parentId = null } = router.query as { parentId?: string | null };
const { myDatasets, loadMyDatasets } = useDatasetStore();
const { data: myDatasets = [], runAsync: loadMyDatasets } = useRequest2(
() =>
getDatasets({
parentId
}),
{
manual: false,
refreshDeps: [parentId]
}
);
const { data: folderDetail, runAsync: refetchFolderDetail } = useRequest2(
() => (parentId ? getDatasetById(parentId) : Promise.resolve(undefined)),
@@ -66,17 +86,6 @@ function DatasetContextProvider({ children }: { children: React.ReactNode }) {
refreshDeps: [parentId, myDatasets]
}
);
const getDatasetFolderList = useCallback(({ parentId }: GetResourceFolderListProps) => {
return getDatasets({
parentId,
type: DatasetTypeEnum.folder
}).then((res) => {
return res.map((item) => ({
id: item._id,
name: item.name
}));
});
}, []);
const { data: paths = [], runAsync: refetchPaths } = useRequest2(
() => getDatasetPaths(parentId),
@@ -87,21 +96,16 @@ function DatasetContextProvider({ children }: { children: React.ReactNode }) {
);
const { runAsync: refetchDatasets, loading: isFetchingDatasets } = useRequest2(
() => loadMyDatasets(parentId ?? undefined),
() => loadMyDatasets(),
{
manual: false,
refreshDeps: [parentId]
}
);
const [moveDatasetId, setMoveDatasetId] = useState<string>();
const { runAsync: onUpdateDataset } = useRequest2((data: DatasetUpdateBody) =>
putDatasetById(data).then(async (res) => {
await Promise.all([refetchDatasets(), refetchPaths()]);
return res;
})
);
const { runAsync: onUpdateDataset } = useRequest2(putDatasetById, {
onSuccess: () => Promise.all([refetchDatasets(), refetchPaths(), loadMyDatasets()])
});
const onMoveDataset = useCallback(
async (parentId: ParentIdType) => {
@@ -114,6 +118,18 @@ function DatasetContextProvider({ children }: { children: React.ReactNode }) {
[moveDatasetId, onUpdateDataset]
);
const getDatasetFolderList = useCallback(async ({ parentId }: GetResourceFolderListProps) => {
return (
await getDatasets({
parentId,
type: DatasetTypeEnum.folder
})
).map((item) => ({
id: item._id,
name: item.name
}));
}, []);
const [editedDataset, setEditedDataset] = useState<EditResourceInfoFormType>();
const { runAsync: onDelDataset } = useRequest2(delDatasetById, {
@@ -131,7 +147,10 @@ function DatasetContextProvider({ children }: { children: React.ReactNode }) {
folderDetail,
editedDataset,
setEditedDataset,
onDelDataset
onDelDataset,
onUpdateDataset,
myDatasets,
loadMyDatasets
};
return (

View File

@@ -5,7 +5,6 @@ import PageContainer from '@/components/PageContainer';
import { useTranslation } from 'next-i18next';
import { serviceSideProps } from '@/web/common/utils/i18n';
import ParentPaths from '@/components/common/folder/Path';
import { useDatasetStore } from '@/web/core/dataset/store/dataset';
import List from './component/List';
import { DatasetsContext } from './context';
import DatasetContextProvider from './context';
@@ -14,13 +13,11 @@ import MyMenu from '@fastgpt/web/components/common/MyMenu';
import { AddIcon } from '@chakra-ui/icons';
import { useUserStore } from '@/web/support/user/useUserStore';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { FolderIcon, FolderImgUrl } from '@fastgpt/global/common/file/image/constants';
import { FolderIcon } from '@fastgpt/global/common/file/image/constants';
import { EditFolderFormType } from '@fastgpt/web/components/common/MyModal/EditFolderModal';
import dynamic from 'next/dynamic';
import { postCreateDataset, putDatasetById } from '@/web/core/dataset/api';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { postCreateDatasetFolder, resumeInheritPer } from '@/web/core/dataset/api';
import FolderSlideCard from '@/components/common/folder/SlideCard';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import {
DatasetDefaultPermissionVal,
DatasetPermissionList
@@ -44,17 +41,18 @@ const Dataset = () => {
const router = useRouter();
const { parentId } = router.query as { parentId: string };
const { myDatasets } = useDatasetStore();
const {
myDatasets,
paths,
isFetchingDatasets,
refetchPaths,
refetchDatasets,
loadMyDatasets,
refetchFolderDetail,
folderDetail,
setEditedDataset,
setMoveDatasetId,
onDelDataset
onDelDataset,
onUpdateDataset
} = useContextSelector(DatasetsContext, (v) => v);
const { userInfo } = useUserStore();
@@ -139,7 +137,11 @@ const Dataset = () => {
{!!folderDetail && isPc && (
<Box ml="6">
<FolderSlideCard
refreshDeps={[folderDetail._id]}
resumeInheritPermission={() => resumeInheritPer(folderDetail._id)}
isInheritPermission={folderDetail.inheritPermission}
hasParent={!!folderDetail.parentId}
refetchResource={() => Promise.all([refetchFolderDetail(), loadMyDatasets()])}
refreshDeps={[folderDetail._id, folderDetail.inheritPermission]}
name={folderDetail.name}
intro={folderDetail.intro}
onEdit={() => {
@@ -165,7 +167,7 @@ const Dataset = () => {
value: folderDetail.defaultPermission,
defaultValue: DatasetDefaultPermissionVal,
onChange: (e) => {
return putDatasetById({
return onUpdateDataset({
id: folderDetail._id,
defaultPermission: e
});
@@ -192,7 +194,8 @@ const Dataset = () => {
deleteDatasetCollaborators({
datasetId: folderDetail._id,
tmbId
})
}),
refreshDeps: [folderDetail._id, folderDetail.inheritPermission]
}}
/>
</Box>
@@ -202,16 +205,14 @@ const Dataset = () => {
{!!editFolderData && (
<EditFolderModal
onClose={() => setEditFolderData(undefined)}
onCreate={async ({ name }) => {
onCreate={async ({ name, intro }) => {
try {
await postCreateDataset({
await postCreateDatasetFolder({
parentId: parentId || undefined,
name,
type: DatasetTypeEnum.folder,
avatar: FolderImgUrl,
intro: ''
intro: intro ?? ''
});
refetchDatasets();
loadMyDatasets();
refetchPaths();
} catch (error) {
return Promise.reject(error);
@@ -219,13 +220,11 @@ const Dataset = () => {
}}
onEdit={async ({ name, intro, id }) => {
try {
await putDatasetById({
await onUpdateDataset({
id,
name,
intro
});
refetchDatasets();
refetchPaths();
} catch (error) {
return Promise.reject(error);
}

View File

@@ -42,6 +42,7 @@ import type { GetDatasetListBody } from '@/pages/api/core/dataset/list';
import type { UpdateDatasetCollectionParams } from '@/pages/api/core/dataset/collection/update';
import type { GetDatasetDataListProps } from '@/pages/api/core/dataset/data/list';
import type { UpdateDatasetDataProps } from '@fastgpt/global/core/dataset/controller';
import type { DatasetFolderCreateBody } from '@/pages/api/core/dataset/folder/create';
/* ======================== dataset ======================= */
export const getDatasets = (data: GetDatasetListBody) =>
@@ -69,6 +70,12 @@ export const postWebsiteSync = (data: PostWebsiteSyncParams) =>
timeout: 600000
}).catch();
export const postCreateDatasetFolder = (data: DatasetFolderCreateBody) =>
POST(`/core/dataset/folder/create`, data);
export const resumeInheritPer = (datasetId: string) =>
GET(`/core/dataset/resumeInheritPermission`, { datasetId });
/* =========== search test ============ */
export const postSearchText = (data: SearchTestProps) =>
POST<SearchTestResponse>(`/core/dataset/searchTest`, data);

View File

@@ -26,7 +26,8 @@ export const defaultDatasetDetail: DatasetItemType = {
permission: new DatasetPermission(),
vectorModel: defaultVectorModels[0],
agentModel: defaultQAModels[0],
defaultPermission: DatasetDefaultPermissionVal
defaultPermission: DatasetDefaultPermissionVal,
inheritPermission: true
};
export const defaultCollectionDetail: DatasetCollectionItemType = {
@@ -47,7 +48,8 @@ export const defaultCollectionDetail: DatasetCollectionItemType = {
status: 'active',
vectorModel: defaultVectorModels[0].model,
agentModel: defaultQAModels[0].model,
defaultPermission: DatasetDefaultPermissionVal
defaultPermission: DatasetDefaultPermissionVal,
inheritPermission: true
},
parentId: '',
name: '',

View File

@@ -11,7 +11,7 @@ type State = {
allDatasets: DatasetSimpleItemType[];
loadAllDatasets: () => Promise<DatasetSimpleItemType[]>;
myDatasets: DatasetListItemType[];
loadMyDatasets: (parentId?: string) => Promise<any>;
loadMyDatasets: (parentId?: string) => Promise<DatasetListItemType[]>;
};
export const useDatasetStore = create<State>()(