import { PermissionValueType } from '@fastgpt/global/support/permission/type';
import { getResourcePermission, parseHeaderCert } from '../controller';
import {
  CollectionWithDatasetType,
  DatasetDataItemType,
  DatasetSchemaType
} from '@fastgpt/global/core/dataset/type';
import { getTmbInfoByTmbId } from '../../user/team/controller';
import { MongoDataset } from '../../../core/dataset/schema';
import { NullPermission, PerResourceTypeEnum } from '@fastgpt/global/support/permission/constant';
import { DatasetErrEnum } from '@fastgpt/global/common/error/code/dataset';
import { DatasetPermission } from '@fastgpt/global/support/permission/dataset/controller';
import { getCollectionWithDataset } from '../../../core/dataset/controller';
import { MongoDatasetData } from '../../../core/dataset/data/schema';
import { AuthModeType, AuthResponseType } from '../type';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { ParentIdType } from '@fastgpt/global/common/parentFolder/type';
import { DatasetDefaultPermissionVal } from '@fastgpt/global/support/permission/dataset/constant';

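/**
 * Resolve a team member's permission on a dataset.
 *
 * Resolution order:
 * 1. Root requests and team owners get owner permission directly.
 * 2. Folders, datasets with inheritPermission disabled, and root-level
 *    datasets read their own resource permission record.
 * 3. Everything else inherits from the parent folder (resolved recursively).
 *
 * Rejects with DatasetErrEnum.unAuthDataset when the resolved permission
 * does not satisfy `per`.
 *
 * @example
 * // Illustrative sketch; `ReadPermissionVal` is assumed to be exported from
 * // '@fastgpt/global/support/permission/constant'.
 * const { dataset } = await authDatasetByTmbId({
 *   tmbId,
 *   datasetId,
 *   per: ReadPermissionVal
 * });
 */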
export const authDatasetByTmbId = async ({
  tmbId,
  datasetId,
  per,
  isRoot = false
}: {
  tmbId: string;
  datasetId: string;
  per: PermissionValueType;
  isRoot?: boolean;
}): Promise<{
  dataset: DatasetSchemaType & {
    permission: DatasetPermission;
  };
}> => {
  const dataset = await (async () => {
    const [{ teamId, permission: tmbPer }, dataset] = await Promise.all([
      getTmbInfoByTmbId({ tmbId }),
      MongoDataset.findOne({ _id: datasetId }).lean()
    ]);

    if (!dataset) {
      return Promise.reject(DatasetErrEnum.unExist);
    }

    if (isRoot) {
      return {
        ...dataset,
        permission: new DatasetPermission({
          isOwner: true
        })
      };
    }

    if (String(dataset.teamId) !== teamId) {
      return Promise.reject(DatasetErrEnum.unAuthDataset);
    }

    const isOwner = tmbPer.isOwner || String(dataset.tmbId) === String(tmbId);

    // Get the dataset's own permission, or inherit it from the parent folder.
    const { Per } = await (async () => {
      if (isOwner) {
        return {
          Per: new DatasetPermission({ isOwner: true })
        };
      }
      if (
        dataset.type === DatasetTypeEnum.folder ||
        dataset.inheritPermission === false ||
        !dataset.parentId
      ) {
        // Read the resource permission directly when:
        // 1. it is a folder (folders hold complete permissions of their own);
        // 2. inheritPermission is false;
        // 3. it is a root-level folder/dataset.
        const rp = await getResourcePermission({
          teamId,
          tmbId,
          resourceId: datasetId,
          resourceType: PerResourceTypeEnum.dataset
        });
        const Per = new DatasetPermission({
          per: rp ?? DatasetDefaultPermissionVal,
          isOwner
        });
        return {
          Per
        };
      } else {
        // Not a folder, inheritPermission is true, and not a root-level
        // dataset: inherit the permission from the parent folder.
        const { dataset: parent } = await authDatasetByTmbId({
          tmbId,
          datasetId: dataset.parentId,
          per,
          isRoot
        });

        const Per = new DatasetPermission({
          per: parent.permission.value,
          isOwner
        });

        return {
          Per
        };
      }
    })();

    if (!Per.checkPer(per)) {
      return Promise.reject(DatasetErrEnum.unAuthDataset);
    }

    return {
      ...dataset,
      permission: Per
    };
  })();

  return { dataset };
};

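/**
 * Header-based variant of authDatasetByTmbId: parses the request certificate
 * (cookie / token / API key) via parseHeaderCert, then checks `per` against
 * the resolved dataset permission.
 *
 * @example
 * // Illustrative sketch inside an API handler; `req` is assumed to carry
 * // valid FastGPT auth headers, and `WritePermissionVal` to be exported from
 * // '@fastgpt/global/support/permission/constant'.
 * const { dataset, permission } = await authDataset({
 *   req,
 *   authToken: true,
 *   datasetId,
 *   per: WritePermissionVal
 * });
 */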
export const authDataset = async ({
  datasetId,
  per,
  ...props
}: AuthModeType & {
  datasetId: ParentIdType;
  per: PermissionValueType;
}): Promise<
  AuthResponseType & {
    dataset: DatasetSchemaType & {
      permission: DatasetPermission;
    };
  }
> => {
  const result = await parseHeaderCert(props);
  const { tmbId } = result;

  if (!datasetId) {
    return Promise.reject(DatasetErrEnum.unExist);
  }

  const { dataset } = await authDatasetByTmbId({
    tmbId,
    datasetId,
    per,
    isRoot: result.isRoot
  });

  return {
    ...result,
    permission: dataset.permission,
    dataset
  };
};

// Temporary solution: a collection's permission is resolved by authing its
// parent dataset.
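/**
 * Auth a dataset collection. Permission is inherited from the parent dataset
 * (see authDatasetByTmbId above).
 *
 * @example
 * // Illustrative sketch; `ReadPermissionVal` is assumed to be exported from
 * // '@fastgpt/global/support/permission/constant'.
 * const { collection, permission } = await authDatasetCollection({
 *   req,
 *   authToken: true,
 *   collectionId,
 *   per: ReadPermissionVal
 * });
 */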
export async function authDatasetCollection({
  collectionId,
  per = NullPermission,
  isRoot = false,
  ...props
}: AuthModeType & {
  collectionId: string;
  isRoot?: boolean;
}): Promise<
  AuthResponseType<DatasetPermission> & {
    collection: CollectionWithDatasetType;
  }
> {
  const { teamId, tmbId, userId, isRoot: isRootFromHeader } = await parseHeaderCert(props);
  const collection = await getCollectionWithDataset(collectionId);

  if (!collection) {
    return Promise.reject(DatasetErrEnum.unExist);
  }

  const { dataset } = await authDatasetByTmbId({
    tmbId,
    datasetId: collection.datasetId,
    per,
    isRoot: isRootFromHeader
  });

  return {
    userId,
    teamId,
    tmbId,
    collection,
    permission: dataset.permission,
    isRoot: isRootFromHeader
  };
}

// export async function authDatasetFile({
//   fileId,
//   per,
//   ...props
// }: AuthModeType & {
//   fileId: string;
// }): Promise<
//   AuthResponseType<DatasetPermission> & {
//     file: DatasetFileSchema;
//   }
// > {
//   const { teamId, tmbId, isRoot } = await parseHeaderCert(props);

//   const [file, collection] = await Promise.all([
//     getFileById({ bucketName: BucketNameEnum.dataset, fileId }),
//     MongoDatasetCollection.findOne({
//       teamId,
//       fileId
//     })
//   ]);

//   if (!file) {
//     return Promise.reject(CommonErrEnum.fileNotFound);
//   }

//   if (!collection) {
//     return Promise.reject(DatasetErrEnum.unAuthDatasetFile);
//   }

//   try {
//     const { permission } = await authDatasetCollection({
//       ...props,
//       collectionId: collection._id,
//       per,
//       isRoot
//     });

//     return {
//       teamId,
//       tmbId,
//       file,
//       permission,
//       isRoot
//     };
//   } catch (error) {
//     return Promise.reject(DatasetErrEnum.unAuthDatasetFile);
//   }
// }

/*
  DatasetData permission is inherited from its collection.
*/
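/**
 * @example
 * // Illustrative sketch; `per` is omitted here and is assumed to fall back
 * // to NullPermission via the default in authDatasetCollection.
 * const { datasetData, permission } = await authDatasetData({
 *   req,
 *   authToken: true,
 *   dataId
 * });
 */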
export async function authDatasetData({
  dataId,
  ...props
}: AuthModeType & {
  dataId: string;
}) {
  // get mongo dataset.data
  const datasetData = await MongoDatasetData.findById(dataId);

  if (!datasetData) {
    return Promise.reject('core.dataset.error.Data not found');
  }

  const result = await authDatasetCollection({
    ...props,
    collectionId: datasetData.collectionId
  });

  const data: DatasetDataItemType = {
    id: String(datasetData._id),
    teamId: datasetData.teamId,
    updateTime: datasetData.updateTime,
    q: datasetData.q,
    a: datasetData.a,
    chunkIndex: datasetData.chunkIndex,
    indexes: datasetData.indexes,
    datasetId: String(datasetData.datasetId),
    collectionId: String(datasetData.collectionId),
    sourceName: result.collection.name || '',
    sourceId: result.collection?.fileId || result.collection?.rawLink,
    isOwner: String(datasetData.tmbId) === String(result.tmbId)
    // permission: result.permission
  };

  return {
    ...result,
    datasetData: data,
    collection: result.collection
  };
}