Dataset Permission (#1786)

* feat: dataset controllers

feat: dataset schema

fix: add missing type to dataset schema
Signed-off-by: FinleyGe <m13203533462@163.com>

* feat: dataset list api

Signed-off-by: FinleyGe <m13203533462@163.com>

* chore: all dataset api

Signed-off-by: FinleyGe <m13203533462@163.com>

* feat: new auth dataset method

Signed-off-by: FinleyGe <m13203533462@163.com>

* chore: use new auth method in detail, paths.
feat: add new param defaultPermission to create api

Signed-off-by: FinleyGe <m13203533462@163.com>

* chore: app auth params

Signed-off-by: FinleyGe <m13203533462@163.com>

* chore: use new auth method

Signed-off-by: FinleyGe <m13203533462@163.com>

* feat: new auth collection and file method

Signed-off-by: FinleyGe <m13203533462@163.com>

* chore: dataset collection api new auth

Signed-off-by: FinleyGe <m13203533462@163.com>

* chore: create/*.ts auth

Signed-off-by: FinleyGe <m13203533462@163.com>

* chore: dataset auth

Signed-off-by: FinleyGe <m13203533462@163.com>

* fix: import paths

Signed-off-by: FinleyGe <m13203533462@163.com>

* feat: dataset collaborator

Signed-off-by: FinleyGe <m13203533462@163.com>

* chore: dataset frontend

feat: dataset list frontend

feat: dataset detail
Signed-off-by: FinleyGe <m13203533462@163.com>

* feat: finish the dataset permission

fix: ts errors
Signed-off-by: FinleyGe <m13203533462@163.com>

* fix: empty response of collection api

Signed-off-by: FinleyGe <m13203533462@163.com>

* chore: adjust the code

* chore: adjust the code

* chore: i18n

* fix: ts error

* fix: fe CollectionCard permission

---------

Signed-off-by: FinleyGe <m13203533462@163.com>
This commit is contained in:
Finley Ge
2024-06-20 20:52:03 +08:00
committed by GitHub
parent 2b25e3cc2d
commit 980b4d3db5
71 changed files with 12411 additions and 9993 deletions

View File

@@ -4,7 +4,8 @@ import { ErrType } from '../errorCode';
const startCode = 507000;
export enum CommonErrEnum {
fileNotFound = 'fileNotFound',
unAuthFile = 'unAuthFile'
unAuthFile = 'unAuthFile',
missingParams = 'missingParams'
}
const datasetErr = [
{
@@ -14,6 +15,10 @@ const datasetErr = [
{
statusText: CommonErrEnum.unAuthFile,
message: 'error.unAuthFile'
},
{
statusText: CommonErrEnum.missingParams,
message: 'error.missingParams'
}
];
export default datasetErr.reduce((acc, cur, index) => {

View File

@@ -2,6 +2,7 @@ import { ErrType } from '../errorCode';
/* dataset: 501000 */
export enum DatasetErrEnum {
unExist = 'unExistDataset',
unAuthDataset = 'unAuthDataset',
unCreateCollection = 'unCreateCollection',
unAuthDatasetCollection = 'unAuthDatasetCollection',
@@ -11,6 +12,10 @@ export enum DatasetErrEnum {
unLinkCollection = 'unLinkCollection'
}
const datasetErr = [
{
statusText: DatasetErrEnum.unExist,
message: 'core.dataset.error.unExistDataset'
},
{
statusText: DatasetErrEnum.unAuthDataset,
message: 'core.dataset.error.unAuthDataset'

View File

@@ -9,12 +9,13 @@ export type DatasetUpdateBody = {
name?: string;
avatar?: string;
intro?: string;
permission?: DatasetSchemaType['permission'];
permission?: DatasetSchemaType['permission']; // TODO: Should be deleted.
agentModel?: LLMModelItemType;
status?: DatasetSchemaType['status'];
websiteConfig?: DatasetSchemaType['websiteConfig'];
externalReadUrl?: DatasetSchemaType['externalReadUrl'];
defaultPermission?: DatasetSchemaType['defaultPermission'];
};
/* ================= collection ===================== */

View File

@@ -0,0 +1,11 @@
import { UpdateClbPermissionProps } from '../../support/permission/collaborator';
import { PermissionValueType } from '../../support/permission/type';
/** Request body for adding/updating collaborators' permission on a dataset. */
export type UpdateDatasetCollaboratorBody = UpdateClbPermissionProps & {
datasetId: string;
};
/** Params for removing one collaborator (identified by team-member id) from a dataset. */
export type DatasetCollaboratorDeleteParams = {
datasetId: string;
tmbId: string;
};

View File

@@ -1,3 +1,4 @@
import { PermissionValueType } from 'support/permission/type';
import type { LLMModelItemType, VectorModelItemType } from '../../core/ai/model.d';
import { PermissionTypeEnum } from '../../support/permission/constant';
import { PushDatasetDataChunkProps } from './api';
@@ -8,6 +9,8 @@ import {
SearchScoreTypeEnum,
TrainingModeEnum
} from './constants';
import { DatasetPermission } from '../../support/permission/dataset/controller';
import { Permission } from '../../support/permission/controller';
/* schema */
export type DatasetSchemaType = {
@@ -24,7 +27,7 @@ export type DatasetSchemaType = {
intro: string;
type: DatasetTypeEnum;
status: `${DatasetStatusEnum}`;
permission: `${PermissionTypeEnum}`;
permission: DatasetPermission;
// metadata
websiteConfig?: {
@@ -32,6 +35,7 @@ export type DatasetSchemaType = {
selector: string;
};
externalReadUrl?: string;
defaultPermission: PermissionValueType;
};
export type DatasetCollectionSchemaType = {
@@ -132,24 +136,22 @@ export type DatasetListItemType = {
name: string;
intro: string;
type: DatasetTypeEnum;
isOwner: boolean;
canWrite: boolean;
permission: `${PermissionTypeEnum}`;
permission: DatasetPermission;
vectorModel: VectorModelItemType;
defaultPermission: PermissionValueType;
};
export type DatasetItemType = Omit<DatasetSchemaType, 'vectorModel' | 'agentModel'> & {
vectorModel: VectorModelItemType;
agentModel: LLMModelItemType;
isOwner: boolean;
canWrite: boolean;
};
/* ================= collection ===================== */
export type DatasetCollectionItemType = CollectionWithDatasetType & {
canWrite: boolean;
sourceName: string;
sourceId?: string;
file?: DatasetFileSchema;
permission: DatasetPermission;
};
/* ================= data ===================== */

View File

@@ -0,0 +1,20 @@
import { NullPermission, PermissionKeyEnum, PermissionList } from '../constant';
// No dataset-specific permission keys yet; placeholder for future extension.
export enum DatasetPermissionKeyEnum {}
// Dataset permission catalog: reuses the generic read/write/manage entries
// and overrides only the (UI-facing) descriptions.
export const DatasetPermissionList = {
[PermissionKeyEnum.read]: {
...PermissionList[PermissionKeyEnum.read],
description: '可查看知识库内容'
},
[PermissionKeyEnum.write]: {
...PermissionList[PermissionKeyEnum.write],
description: '可增加和变更知识库内容'
},
[PermissionKeyEnum.manage]: {
...PermissionList[PermissionKeyEnum.manage],
description: '可管理整个知识库数据和信息'
}
};
// New datasets grant no permission bits by default.
export const DatasetDefaultPermission = NullPermission;

View File

@@ -0,0 +1,14 @@
import { NullPermission } from '../constant';
import { PerConstructPros, Permission } from '../controller';
/**
 * Dataset-scoped permission wrapper.
 * Identical to the base `Permission`, except that a missing or falsy
 * `per` value defaults to `NullPermission` (no bits granted).
 */
export class DatasetPermission extends Permission {
  constructor(props?: PerConstructPros) {
    const resolved: PerConstructPros = props ?? { per: NullPermission };
    if (!resolved.per) {
      // Covers both `per` absent and `per === 0` — same end state either way.
      resolved.per = NullPermission;
    }
    super(resolved);
  }
}

View File

@@ -12,6 +12,7 @@ import {
TeamMemberCollectionName
} from '@fastgpt/global/support/user/team/constant';
import { PermissionTypeEnum, PermissionTypeMap } from '@fastgpt/global/support/permission/constant';
import { DatasetDefaultPermission } from '@fastgpt/global/support/permission/dataset/constant';
export const DatasetCollectionName = 'datasets';
@@ -90,7 +91,11 @@ const DatasetSchema = new Schema({
}
}
},
externalReadUrl: String
externalReadUrl: String,
defaultPermission: {
type: Number,
default: DatasetDefaultPermission
}
});
try {

View File

@@ -39,7 +39,6 @@ export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPlugi
if (source === PluginSourceEnum.personal) {
await authAppByTmbId({
appId: pluginId,
teamId: workflowApp.teamId,
tmbId: workflowApp.tmbId,
per: ReadPermissionVal
});

View File

@@ -2,7 +2,6 @@ import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
import type { ModuleDispatchProps } from '@fastgpt/global/core/workflow/type/index.d';
import { SelectAppItemType } from '@fastgpt/global/core/workflow/type/index.d';
import { dispatchWorkFlow } from '../index';
import { MongoApp } from '../../../../core/app/schema';
import { responseWrite } from '../../../../common/response';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { SseResponseEventEnum } from '@fastgpt/global/core/workflow/runtime/constants';
@@ -48,7 +47,6 @@ export const dispatchAppRequest = async (props: Props): Promise<Response> => {
// 检查该工作流的tmb是否有调用该app的权限不是校验对话的人是否有权限
const { app: appData } = await authAppByTmbId({
appId: app.id,
teamId: workflowApp.teamId,
tmbId: workflowApp.tmbId,
per: ReadPermissionVal
});

View File

@@ -12,17 +12,15 @@ import { AuthResponseType } from '../type/auth.d';
import { PermissionValueType } from '@fastgpt/global/support/permission/type';
export const authAppByTmbId = async ({
teamId,
tmbId,
appId,
per
}: {
teamId: string;
tmbId: string;
appId: string;
per: PermissionValueType;
}) => {
const { permission: tmbPer } = await getTmbInfoByTmbId({ tmbId });
const { teamId, permission: tmbPer } = await getTmbInfoByTmbId({ tmbId });
const app = await (async () => {
// get app and per
@@ -68,10 +66,9 @@ export const authApp = async ({
}
> => {
const result = await parseHeaderCert(props);
const { teamId, tmbId } = result;
const { tmbId } = result;
const { app } = await authAppByTmbId({
teamId,
tmbId,
appId,
per

View File

@@ -1,201 +0,0 @@
import { AuthModeType } from '../type';
import { parseHeaderCert } from '../controller';
import { DatasetErrEnum } from '@fastgpt/global/common/error/code/dataset';
import { MongoDataset } from '../../../core/dataset/schema';
import { getCollectionWithDataset } from '../../../core/dataset/controller';
import { PermissionTypeEnum } from '@fastgpt/global/support/permission/constant';
import { TeamMemberRoleEnum } from '@fastgpt/global/support/user/team/constant';
import { AuthResponseType } from '@fastgpt/global/support/permission/type';
import {
CollectionWithDatasetType,
DatasetFileSchema,
DatasetSchemaType
} from '@fastgpt/global/core/dataset/type';
import { getFileById } from '../../../common/file/gridfs/controller';
import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
import { getTmbInfoByTmbId } from '../../user/team/controller';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
import { MongoDatasetCollection } from '../../../core/dataset/collection/schema';
export async function authDatasetByTmbId({
teamId,
tmbId,
datasetId,
per
}: {
teamId: string;
tmbId: string;
datasetId: string;
per: AuthModeType['per'];
}) {
const { role } = await getTmbInfoByTmbId({ tmbId });
const { dataset, isOwner, canWrite } = await (async () => {
const dataset = await MongoDataset.findOne({ _id: datasetId, teamId }).lean();
if (!dataset) {
return Promise.reject(DatasetErrEnum.unAuthDataset);
}
const isOwner =
role !== TeamMemberRoleEnum.visitor &&
(String(dataset.tmbId) === tmbId || role === TeamMemberRoleEnum.owner);
const canWrite =
isOwner ||
(role !== TeamMemberRoleEnum.visitor && dataset.permission === PermissionTypeEnum.public);
if (per === 'r') {
if (!isOwner && dataset.permission !== PermissionTypeEnum.public) {
return Promise.reject(DatasetErrEnum.unAuthDataset);
}
}
if (per === 'w' && !canWrite) {
return Promise.reject(DatasetErrEnum.unAuthDataset);
}
if (per === 'owner' && !isOwner) {
return Promise.reject(DatasetErrEnum.unAuthDataset);
}
return { dataset, isOwner, canWrite };
})();
return {
dataset,
isOwner,
canWrite
};
}
export async function authDataset({
datasetId,
per = 'owner',
...props
}: AuthModeType & {
datasetId: string;
}): Promise<
AuthResponseType & {
dataset: DatasetSchemaType;
}
> {
const result = await parseHeaderCert(props);
const { teamId, tmbId } = result;
const { dataset, isOwner, canWrite } = await authDatasetByTmbId({
teamId,
tmbId,
datasetId,
per
});
return {
...result,
dataset,
isOwner,
canWrite
};
}
/*
Read: in team and dataset permission is public
Write: in team, not visitor and dataset permission is public
*/
export async function authDatasetCollection({
collectionId,
per = 'owner',
...props
}: AuthModeType & {
collectionId: string;
}): Promise<
AuthResponseType & {
collection: CollectionWithDatasetType;
}
> {
const { teamId, tmbId } = await parseHeaderCert(props);
const { role } = await getTmbInfoByTmbId({ tmbId });
const { collection, isOwner, canWrite } = await (async () => {
const collection = await getCollectionWithDataset(collectionId);
if (!collection || String(collection.teamId) !== teamId) {
return Promise.reject(DatasetErrEnum.unAuthDatasetCollection);
}
const isOwner = String(collection.tmbId) === tmbId || role === TeamMemberRoleEnum.owner;
const canWrite =
isOwner ||
(role !== TeamMemberRoleEnum.visitor &&
collection.datasetId.permission === PermissionTypeEnum.public);
if (per === 'r') {
if (!isOwner && collection.datasetId.permission !== PermissionTypeEnum.public) {
return Promise.reject(DatasetErrEnum.unAuthDatasetCollection);
}
}
if (per === 'w' && !canWrite) {
return Promise.reject(DatasetErrEnum.unAuthDatasetCollection);
}
if (per === 'owner' && !isOwner) {
return Promise.reject(DatasetErrEnum.unAuthDatasetCollection);
}
return {
collection,
isOwner,
canWrite
};
})();
return {
teamId,
tmbId,
collection,
isOwner,
canWrite
};
}
export async function authDatasetFile({
fileId,
per = 'owner',
...props
}: AuthModeType & {
fileId: string;
}): Promise<
AuthResponseType & {
file: DatasetFileSchema;
}
> {
const { teamId, tmbId } = await parseHeaderCert(props);
const [file, collection] = await Promise.all([
getFileById({ bucketName: BucketNameEnum.dataset, fileId }),
MongoDatasetCollection.findOne({
teamId,
fileId
})
]);
if (!file) {
return Promise.reject(CommonErrEnum.fileNotFound);
}
if (!collection) {
return Promise.reject(DatasetErrEnum.unAuthDatasetFile);
}
// file role = collection role
try {
const { isOwner, canWrite } = await authDatasetCollection({
...props,
collectionId: collection._id,
per
});
return {
teamId,
tmbId,
file,
isOwner,
canWrite
};
} catch (error) {
return Promise.reject(DatasetErrEnum.unAuthDatasetFile);
}
}

View File

@@ -0,0 +1,213 @@
import { PermissionValueType } from '@fastgpt/global/support/permission/type';
import { getResourcePermission, parseHeaderCert } from '../controller';
import { AuthPropsType, AuthResponseType } from '../type/auth';
import {
CollectionWithDatasetType,
DatasetDataItemType,
DatasetFileSchema,
DatasetSchemaType
} from '@fastgpt/global/core/dataset/type';
import { getTmbInfoByTmbId } from '../../user/team/controller';
import { MongoDataset } from '../../../core/dataset/schema';
import { PerResourceTypeEnum } from '@fastgpt/global/support/permission/constant';
import { DatasetErrEnum } from '@fastgpt/global/common/error/code/dataset';
import { DatasetPermission } from '@fastgpt/global/support/permission/dataset/controller';
import { getCollectionWithDataset } from '../../../core/dataset/controller';
import { MongoDatasetCollection } from '../../../core/dataset/collection/schema';
import { getFileById } from '../../../common/file/gridfs/controller';
import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
import { MongoDatasetData } from '../../../core/dataset/data/schema';
// Authorize a team member's access to a dataset.
// Resolves the member's team, loads the dataset together with any
// member-specific resource permission, and checks `per` against the
// effective permission (per-member override if present, otherwise the
// dataset's defaultPermission; owners always pass `checkPer`).
// Rejects with `unExist` when the dataset is missing and
// `unAuthDataset` when the permission check fails.
export async function authDatasetByTmbId({
tmbId,
datasetId,
per
}: {
tmbId: string;
datasetId: string;
per: PermissionValueType;
}) {
const { teamId, permission: tmbPer } = await getTmbInfoByTmbId({ tmbId });
const dataset = await (async () => {
// get dataset and its per-member resource permission in parallel
const [dataset, rp] = await Promise.all([
MongoDataset.findOne({ _id: datasetId, teamId }).lean(),
getResourcePermission({
teamId,
tmbId,
resourceId: datasetId,
resourceType: PerResourceTypeEnum.dataset
}) // this could be null
]);
if (!dataset) {
return Promise.reject(DatasetErrEnum.unExist);
}
// Team owner, or the member who created the dataset, counts as owner.
const isOwner = tmbPer.isOwner || String(dataset.tmbId) === tmbId;
const Per = new DatasetPermission({
per: rp?.permission ?? dataset.defaultPermission,
isOwner
});
if (!Per.checkPer(per)) {
return Promise.reject(DatasetErrEnum.unAuthDataset);
}
// Attach the resolved permission object to the returned dataset.
return {
...dataset,
permission: Per
};
})();
return { dataset: dataset };
}
// Auth Dataset
/**
 * Authenticate the request (header cert) and authorize `per` access
 * to the given dataset. Returns the dataset with its resolved
 * `DatasetPermission` attached.
 */
export async function authDataset({
  datasetId,
  per,
  ...props
}: AuthPropsType & {
  datasetId: string;
}): Promise<
  AuthResponseType<DatasetPermission> & {
    dataset: DatasetSchemaType;
  }
> {
  const cert = await parseHeaderCert(props);

  const { dataset } = await authDatasetByTmbId({
    tmbId: cert.tmbId,
    datasetId,
    per
  });

  return {
    teamId: cert.teamId,
    tmbId: cert.tmbId,
    dataset,
    permission: dataset.permission
  };
}
// Temporary solution: a collection has no permission of its own, so
// authDatasetCollection resolves permission from the dataset it belongs to.
/**
 * Authorize `per` access to a dataset collection. The check is
 * delegated to the owning dataset, since collections carry no
 * permission of their own.
 */
export async function authDatasetCollection({
  collectionId,
  per,
  ...props
}: AuthPropsType & {
  collectionId: string;
}): Promise<
  AuthResponseType<DatasetPermission> & {
    collection: CollectionWithDatasetType;
  }
> {
  const { teamId, tmbId } = await parseHeaderCert(props);

  const collection = await getCollectionWithDataset(collectionId);
  if (!collection) {
    return Promise.reject(DatasetErrEnum.unExist);
  }

  // Authorize against the dataset the collection belongs to.
  const authResult = await authDatasetByTmbId({
    tmbId,
    datasetId: collection.datasetId._id,
    per
  });

  return {
    teamId,
    tmbId,
    collection,
    permission: authResult.dataset.permission
  };
}
// Authorize access to a dataset file stored in GridFS.
// A file's permission equals the permission of the collection that
// references it (and therefore of the owning dataset).
export async function authDatasetFile({
fileId,
per,
...props
}: AuthPropsType & {
fileId: string;
}): Promise<
AuthResponseType<DatasetPermission> & {
file: DatasetFileSchema;
}
> {
const { teamId, tmbId } = await parseHeaderCert(props);
// Load the file and the collection referencing it in parallel.
const [file, collection] = await Promise.all([
getFileById({ bucketName: BucketNameEnum.dataset, fileId }),
MongoDatasetCollection.findOne({
teamId,
fileId
})
]);
if (!file) {
return Promise.reject(CommonErrEnum.fileNotFound);
}
// A file without a referencing collection in this team is not accessible.
if (!collection) {
return Promise.reject(DatasetErrEnum.unAuthDatasetFile);
}
try {
const { permission } = await authDatasetCollection({
...props,
collectionId: collection._id,
per
});
return {
teamId,
tmbId,
file,
permission
};
} catch (error) {
// Map any collection-level auth failure to the file-level error code.
return Promise.reject(DatasetErrEnum.unAuthDatasetFile);
}
}
// Authorize access to a single dataset data (chunk) record.
// Permission is inherited from the collection the record belongs to.
export async function authDatasetData({
dataId,
...props
}: AuthPropsType & {
dataId: string;
}) {
// get mongo dataset.data
const datasetData = await MongoDatasetData.findById(dataId);
if (!datasetData) {
return Promise.reject('core.dataset.error.Data not found');
}
// Delegate the permission check to the owning collection/dataset.
const result = await authDatasetCollection({
...props,
collectionId: datasetData.collectionId
});
// Shape the raw mongo document into the API item type.
const data: DatasetDataItemType = {
id: String(datasetData._id),
teamId: datasetData.teamId,
q: datasetData.q,
a: datasetData.a,
chunkIndex: datasetData.chunkIndex,
indexes: datasetData.indexes,
datasetId: String(datasetData.datasetId),
collectionId: String(datasetData.collectionId),
sourceName: result.collection.name || '',
sourceId: result.collection?.fileId || result.collection?.rawLink,
// "owner" here means the creator of this data record, not the dataset owner
isOwner: String(datasetData.tmbId) === result.tmbId,
canWrite: result.permission.hasWritePer
};
return {
...result,
datasetData: data
};
}

View File

@@ -31,7 +31,6 @@ export async function authOutLinkCrud({
}
const { app } = await authAppByTmbId({
teamId,
tmbId,
appId: outLink.appId,
per: ManagePermissionVal

View File

@@ -11,11 +11,11 @@ export type AuthPropsType = {
per: PermissionValueType;
};
export type AuthResponseType = {
export type AuthResponseType<T = Permission> = {
teamId: string;
tmbId: string;
authType?: `${AuthUserTypeEnum}`;
appId?: string;
apikey?: string;
permission: Permission;
permission: T;
};

18489
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,9 +1,9 @@
/** @type {import('next').NextConfig} */
const { i18n } = require('./next-i18next.config');
const path = require('path');
const isDev = process.env.NODE_ENV === 'development';
/** @type {import('next').NextConfig} */
const nextConfig = {
i18n,
output: 'standalone',

View File

@@ -11,6 +11,7 @@ import {
SearchDataResponseItemType
} from '@fastgpt/global/core/dataset/type';
import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import { PermissionValueType } from '@fastgpt/global/support/permission/type';
/* ================= dataset ===================== */
export type CreateDatasetParams = {
@@ -21,6 +22,7 @@ export type CreateDatasetParams = {
avatar: string;
vectorModel?: string;
agentModel?: string;
defaultPermission?: PermissionValueType;
};
export type RebuildEmbeddingProps = {

View File

@@ -3,6 +3,7 @@ import {
DatasetCollectionSchemaType,
DatasetDataSchemaType
} from '@fastgpt/global/core/dataset/type.d';
import { DatasetPermission } from '@fastgpt/global/support/permission/dataset/controller';
/* ================= dataset ===================== */
@@ -18,7 +19,7 @@ export type DatasetCollectionsListItemType = {
trainingAmount: number;
fileId?: string;
rawLink?: string;
canWrite: boolean;
permission: DatasetPermission;
};
/* ================= data ===================== */

View File

@@ -1,31 +1,66 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import type { NextApiRequest } from 'next';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import { getVectorModel } from '@fastgpt/service/core/ai/model';
import type { DatasetSimpleItemType } from '@fastgpt/global/core/dataset/type.d';
import { mongoRPermission } from '@fastgpt/global/support/permission/utils';
import { authUserPer } from '@fastgpt/service/support/permission/user/auth';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { NextAPI } from '@/service/middleware/entry';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import {
PerResourceTypeEnum,
ReadPermissionVal
} from '@fastgpt/global/support/permission/constant';
import { MongoResourcePermission } from '@fastgpt/service/support/permission/schema';
import { DatasetPermission } from '@fastgpt/global/support/permission/dataset/controller';
import { authUserPer } from '@fastgpt/service/support/permission/user/auth';
/* get all dataset by teamId or tmbId */
async function handler(
req: NextApiRequest,
res: NextApiResponse<any>
): Promise<DatasetSimpleItemType[]> {
// 凭证校验
const { teamId, tmbId, permission } = await authUserPer({
async function handler(req: NextApiRequest): Promise<DatasetSimpleItemType[]> {
const {
teamId,
tmbId,
permission: tmbPer
} = await authUserPer({
req,
authToken: true,
authApiKey: true,
per: ReadPermissionVal
});
const datasets = await MongoDataset.find({
...mongoRPermission({ teamId, tmbId, permission }),
type: { $ne: DatasetTypeEnum.folder }
}).lean();
const [myDatasets, rpList] = await Promise.all([
MongoDataset.find({
teamId,
type: {
$ne: DatasetTypeEnum.folder
}
})
.sort({
updateTime: -1
})
.lean(),
MongoResourcePermission.find({
resourceType: PerResourceTypeEnum.dataset,
teamId,
tmbId
}).lean()
]);
return datasets.map((item) => ({
const filterDatasets = myDatasets
.map((dataset) => {
const perVal = rpList.find(
(item) => String(item.resourceId) === String(dataset._id)
)?.permission;
const Per = new DatasetPermission({
per: perVal ?? dataset.defaultPermission,
isOwner: String(dataset.tmbId) === tmbId || tmbPer.isOwner
});
return {
...dataset,
permission: Per
};
})
.filter((app) => app.permission.hasReadPer);
return filterDatasets.map((item) => ({
_id: item._id,
avatar: item.avatar,
name: item.name,

View File

@@ -1,16 +1,11 @@
/*
Create one dataset collection
*/
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { NextApiRequest } from 'next';
import type { CreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api.d';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
import { NextAPI } from '@/service/middleware/entry';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
async function handler(req: NextApiRequest) {
const body = req.body as CreateDatasetCollectionParams;
const { teamId, tmbId } = await authDataset({
@@ -18,7 +13,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
authToken: true,
authApiKey: true,
datasetId: body.datasetId,
per: 'w'
per: WritePermissionVal
});
const { _id } = await createOneCollection({
@@ -26,14 +21,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
teamId,
tmbId
});
return _id;
}
jsonRes(res, {
data: _id
});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
}
export default NextAPI(handler);

View File

@@ -1,8 +1,6 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { NextApiRequest } from 'next';
import { readFileContentFromMongo } from '@fastgpt/service/common/file/gridfs/controller';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { FileIdCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api';
import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
import {
@@ -18,19 +16,17 @@ import { createTrainingUsage } from '@fastgpt/service/support/wallet/usage/contr
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
import { rawText2Chunks } from '@fastgpt/service/core/dataset/read';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { NextAPI } from '@/service/middleware/entry';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
async function handler(req: NextApiRequest) {
const { datasetId, parentId, fileId } = req.body as FileIdCreateDatasetCollectionParams;
const trainingType = TrainingModeEnum.chunk;
try {
await connectToDatabase();
const { teamId, tmbId, dataset } = await authDataset({
req,
authToken: true,
authApiKey: true,
per: 'w',
per: WritePermissionVal,
datasetId: datasetId
});
@@ -103,12 +99,5 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
return collectionId;
});
jsonRes(res);
} catch (error) {
jsonRes(res, {
code: 500,
error
});
}
}
export default NextAPI(handler);

View File

@@ -1,8 +1,5 @@
import type { NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { readFileContentFromMongo } from '@fastgpt/service/common/file/gridfs/controller';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { FileIdCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api';
import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
import {
@@ -23,11 +20,9 @@ import { MongoRawTextBuffer } from '@fastgpt/service/common/buffer/rawText/schem
import { rawText2Chunks } from '@fastgpt/service/core/dataset/read';
import { NextAPI } from '@/service/middleware/entry';
import { ApiRequestProps } from '@fastgpt/service/type/next';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
async function handler(
req: ApiRequestProps<FileIdCreateDatasetCollectionParams>,
res: NextApiResponse<any>
) {
async function handler(req: ApiRequestProps<FileIdCreateDatasetCollectionParams>) {
const {
fileId,
trainingType = TrainingModeEnum.chunk,
@@ -37,13 +32,11 @@ async function handler(
...body
} = req.body;
await connectToDatabase();
const { teamId, tmbId, dataset } = await authDataset({
req,
authToken: true,
authApiKey: true,
per: 'w',
per: WritePermissionVal,
datasetId: body.datasetId
});
@@ -137,13 +130,10 @@ async function handler(
}
);
return collectionId;
});
// remove buffer
await MongoRawTextBuffer.deleteOne({ sourceId: fileId });
jsonRes(res);
return collectionId;
});
}
export default NextAPI(handler);

View File

@@ -1,11 +1,6 @@
/*
Create one dataset collection
*/
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { NextApiRequest } from 'next';
import type { LinkCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api.d';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
import {
TrainingModeEnum,
@@ -18,10 +13,10 @@ import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants'
import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
import { reloadCollectionChunks } from '@fastgpt/service/core/dataset/collection/utils';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
import { NextAPI } from '@/service/middleware/entry';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
async function handler(req: NextApiRequest) {
const {
link,
trainingType = TrainingModeEnum.chunk,
@@ -36,7 +31,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
authToken: true,
authApiKey: true,
datasetId: body.datasetId,
per: 'w'
per: WritePermissionVal
});
// 1. check dataset limit
@@ -45,7 +40,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
insertLen: predictDataLimitLength(trainingType, new Array(10))
});
const { _id: collectionId } = await mongoSessionRun(async (session) => {
await mongoSessionRun(async (session) => {
// 2. create collection
const collection = await createOneCollection({
...body,
@@ -87,14 +82,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
return collection;
});
}
jsonRes(res, {
data: { collectionId }
});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
}
export default NextAPI(handler);

View File

@@ -1,8 +1,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { uploadFile } from '@fastgpt/service/common/file/gridfs/controller';
import { getUploadModel } from '@fastgpt/service/common/file/multer';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { FileCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api';
import { removeFilesByPaths } from '@fastgpt/service/common/file/utils';
import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
@@ -23,6 +22,7 @@ import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
import { MongoImage } from '@fastgpt/service/common/file/image/schema';
import { readRawTextByLocalFile } from '@fastgpt/service/common/file/read/utils';
import { NextAPI } from '@/service/middleware/entry';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
/**
@@ -49,7 +49,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
req,
authToken: true,
authApiKey: true,
per: 'w',
per: WritePermissionVal,
datasetId: data.datasetId
});
@@ -168,9 +168,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
};
});
jsonRes(res, {
data: { collectionId, results: insertResults }
});
return { collectionId, results: insertResults };
} catch (error) {
removeFilesByPaths(filePaths);

View File

@@ -1,11 +1,6 @@
/*
Create one dataset collection
*/
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { NextApiRequest } from 'next';
import type { TextCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api.d';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
import {
TrainingModeEnum,
@@ -20,10 +15,10 @@ import { createTrainingUsage } from '@fastgpt/service/support/wallet/usage/contr
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
import { NextAPI } from '@/service/middleware/entry';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
async function handler(req: NextApiRequest) {
const {
name,
text,
@@ -39,7 +34,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
authToken: true,
authApiKey: true,
datasetId: body.datasetId,
per: 'w'
per: WritePermissionVal
});
// 1. split text to chunks
@@ -107,15 +102,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
return { collectionId, results: insertResults };
});
jsonRes(res, {
data: createResult
});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
return createResult;
}
export const config = {
@@ -125,3 +112,5 @@ export const config = {
}
}
};
export default NextAPI(handler);

View File

@@ -1,19 +1,17 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { NextApiRequest } from 'next';
import { findCollectionAndChild } from '@fastgpt/service/core/dataset/collection/utils';
import { delCollectionAndRelatedSources } from '@fastgpt/service/core/dataset/collection/controller';
import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
import { NextAPI } from '@/service/middleware/entry';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
async function handler(req: NextApiRequest) {
const { id: collectionId } = req.query as { id: string };
if (!collectionId) {
throw new Error('CollectionIdId is required');
return Promise.reject(CommonErrEnum.missingParams);
}
const { teamId, collection } = await authDatasetCollection({
@@ -21,7 +19,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
authToken: true,
authApiKey: true,
collectionId,
per: 'w'
per: WritePermissionVal
});
// find all delete id
@@ -39,12 +37,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
session
})
);
}
jsonRes(res);
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
}
export default NextAPI(handler);

View File

@@ -1,31 +1,30 @@
/*
Get one dataset collection detail
*/
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
import { DatasetCollectionItemType } from '@fastgpt/global/core/dataset/type';
import type { NextApiRequest } from 'next';
import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
import { getFileById } from '@fastgpt/service/common/file/gridfs/controller';
import { getCollectionSourceData } from '@fastgpt/global/core/dataset/collection/utils';
import { NextAPI } from '@/service/middleware/entry';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { DatasetCollectionItemType } from '@fastgpt/global/core/dataset/type';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
async function handler(req: NextApiRequest): Promise<DatasetCollectionItemType> {
const { id } = req.query as { id: string };
if (!id) {
throw new Error('Id is required');
return Promise.reject(CommonErrEnum.missingParams);
}
// 凭证校验
const { collection, canWrite } = await authDatasetCollection({
const { collection, permission } = await authDatasetCollection({
req,
authToken: true,
authApiKey: true,
collectionId: id,
per: 'r'
per: ReadPermissionVal
});
// get file
@@ -33,18 +32,12 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
? await getFileById({ bucketName: BucketNameEnum.dataset, fileId: collection.fileId })
: undefined;
jsonRes<DatasetCollectionItemType>(res, {
data: {
return {
...collection,
canWrite,
...getCollectionSourceData(collection),
permission,
file
};
}
});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
}
export default NextAPI(handler);

View File

@@ -1,21 +1,17 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { NextApiRequest } from 'next';
import { DatasetTrainingCollectionName } from '@fastgpt/service/core/dataset/training/schema';
import { Types } from '@fastgpt/service/common/mongo';
import type { DatasetCollectionsListItemType } from '@/global/core/dataset/type.d';
import type { GetDatasetCollectionsProps } from '@/global/core/api/datasetReq';
import { PagingData } from '@/types';
import { MongoDatasetCollection } from '@fastgpt/service/core/dataset/collection/schema';
import { DatasetCollectionTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { DatasetDataCollectionName } from '@fastgpt/service/core/dataset/data/schema';
import { startTrainingQueue } from '@/service/core/dataset/training/utils';
import { NextAPI } from '@/service/middleware/entry';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
async function handler(req: NextApiRequest) {
let {
pageNum = 1,
pageSize = 10,
@@ -29,12 +25,12 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
pageSize = Math.min(pageSize, 30);
// auth dataset and get my role
const { teamId, tmbId, canWrite } = await authDataset({
const { teamId, permission } = await authDataset({
req,
authToken: true,
authApiKey: true,
datasetId,
per: 'r'
per: ReadPermissionVal
});
const match = {
@@ -56,8 +52,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
updateTime: -1
})
.lean();
return jsonRes<PagingData<DatasetCollectionsListItemType>>(res, {
data: {
return {
pageNum,
pageSize,
data: await Promise.all(
@@ -65,12 +60,11 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
...item,
dataAmount: 0,
trainingAmount: 0,
canWrite // admin or team owner can write
permission
}))
),
total: await MongoDatasetCollection.countDocuments(match)
}
});
};
}
const [collections, total]: [DatasetCollectionsListItemType[], number] = await Promise.all([
@@ -151,9 +145,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
]);
const data = await Promise.all(
collections.map(async (item, i) => ({
collections.map(async (item) => ({
...item,
canWrite: String(item.tmbId) === tmbId || canWrite
permission
}))
);
@@ -162,18 +156,12 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
}
// count collections
jsonRes<PagingData<DatasetCollectionsListItemType>>(res, {
data: {
return {
pageNum,
pageSize,
data,
total
};
}
});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
}
export default NextAPI(handler);

View File

@@ -1,34 +1,24 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { ParentTreePathItemType } from '@fastgpt/global/common/parentFolder/type.d';
import type { NextApiRequest } from 'next';
import { getDatasetCollectionPaths } from '@fastgpt/service/core/dataset/collection/utils';
import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
export default async function handler(req: NextApiRequest) {
const { parentId } = req.query as { parentId: string };
if (!parentId) {
return jsonRes(res, {
data: []
});
return [];
}
await authDatasetCollection({ req, authToken: true, collectionId: parentId, per: 'r' });
await authDatasetCollection({
req,
authToken: true,
collectionId: parentId,
per: ReadPermissionVal
});
const paths = await getDatasetCollectionPaths({
parentId
});
jsonRes<ParentTreePathItemType[]>(res, {
data: paths
});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
return paths;
}

View File

@@ -1,9 +1,10 @@
import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
import type { ApiRequestProps } from '@fastgpt/service/type/next';
import { NextAPI } from '@/service/middleware/entry';
import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
import { DatasetCollectionTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { createFileToken } from '@fastgpt/service/support/permission/controller';
import { BucketNameEnum, ReadFileBaseUrl } from '@fastgpt/global/common/file/constants';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
export type readCollectionSourceQuery = {
collectionId: string;
@@ -17,15 +18,14 @@ export type readCollectionSourceResponse = {
};
async function handler(
req: ApiRequestProps<readCollectionSourceBody, readCollectionSourceQuery>,
res: ApiResponseType<any>
req: ApiRequestProps<readCollectionSourceBody, readCollectionSourceQuery>
): Promise<readCollectionSourceResponse> {
const { collection, teamId, tmbId } = await authDatasetCollection({
req,
authToken: true,
authApiKey: true,
collectionId: req.query.collectionId,
per: 'r'
per: ReadPermissionVal
});
const sourceUrl = await (async () => {

View File

@@ -1,7 +1,5 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
import type { NextApiRequest } from 'next';
import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
import {
getCollectionAndRawText,
reloadCollectionChunks
@@ -17,22 +15,22 @@ import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants'
import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
import { NextAPI } from '@/service/middleware/entry';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
async function handler(req: NextApiRequest) {
const { collectionId } = req.body as { collectionId: string };
if (!collectionId) {
throw new Error('CollectionIdId is required');
return Promise.reject(CommonErrEnum.missingParams);
}
const { collection, tmbId } = await authDatasetCollection({
req,
authToken: true,
collectionId,
per: 'w'
per: WritePermissionVal
});
if (collection.type !== DatasetCollectionTypeEnum.link || !collection.rawLink) {
@@ -44,9 +42,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
});
if (isSameRawText) {
return jsonRes(res, {
data: DatasetCollectionSyncResultEnum.sameRaw
});
return DatasetCollectionSyncResultEnum.sameRaw;
}
/* Not the same original text, create and reload */
@@ -102,13 +98,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
});
});
jsonRes(res, {
data: DatasetCollectionSyncResultEnum.success
});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
return DatasetCollectionSyncResultEnum.success;
}
export default NextAPI(handler);

View File

@@ -1,18 +1,17 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { NextApiRequest } from 'next';
import type { UpdateDatasetCollectionParams } from '@/global/core/api/datasetReq.d';
import { MongoDatasetCollection } from '@fastgpt/service/core/dataset/collection/schema';
import { getCollectionUpdateTime } from '@fastgpt/service/core/dataset/collection/utils';
import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
import { NextAPI } from '@/service/middleware/entry';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
async function handler(req: NextApiRequest) {
const { id, parentId, name } = req.body as UpdateDatasetCollectionParams;
if (!id) {
throw new Error('缺少参数');
return Promise.reject(CommonErrEnum.missingParams);
}
// 凭证校验
@@ -21,7 +20,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
authToken: true,
authApiKey: true,
collectionId: id,
per: 'w'
per: WritePermissionVal
});
const updateFields: Record<string, any> = {
@@ -32,12 +31,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
await MongoDatasetCollection.findByIdAndUpdate(id, {
$set: updateFields
});
}
jsonRes(res);
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
}
export default NextAPI(handler);

View File

@@ -1,6 +1,4 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { NextApiRequest } from 'next';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import type { CreateDatasetParams } from '@/global/core/dataset/api.d';
import { createDefaultCollection } from '@fastgpt/service/core/dataset/collection/controller';
@@ -8,18 +6,18 @@ import { authUserPer } from '@fastgpt/service/support/permission/user/auth';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { getLLMModel, getVectorModel, getDatasetModel } from '@fastgpt/service/core/ai/model';
import { checkTeamDatasetLimit } from '@fastgpt/service/support/permission/teamLimit';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { NullPermission, WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { NextAPI } from '@/service/middleware/entry';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
async function handler(req: NextApiRequest) {
const {
parentId,
name,
type = DatasetTypeEnum.dataset,
avatar,
vectorModel = global.vectorModels[0].model,
agentModel = getDatasetModel().model
agentModel = getDatasetModel().model,
defaultPermission = NullPermission
} = req.body as CreateDatasetParams;
// auth
@@ -34,7 +32,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
const vectorModelStore = getVectorModel(vectorModel);
const agentModelStore = getLLMModel(agentModel);
if (!vectorModelStore || !agentModelStore) {
throw new Error('vectorModel or qaModel is invalid');
throw new Error('vectorModel or qaModel is invalid'); // TODO: use enum code
}
// check limit
@@ -48,7 +46,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
agentModel,
avatar,
parentId: parentId || null,
type
type,
defaultPermission
});
if (type === DatasetTypeEnum.dataset) {
@@ -59,11 +58,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
});
}
jsonRes(res, { data: _id });
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
return _id;
}
export default NextAPI(handler);

View File

@@ -1,32 +1,31 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { authDatasetData } from '@/service/support/permission/auth/dataset';
import type { NextApiRequest } from 'next';
import { authDatasetData } from '@fastgpt/service/support/permission/dataset/auth';
import { deleteDatasetData } from '@/service/core/dataset/data/controller';
import { NextAPI } from '@/service/middleware/entry';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
async function handler(req: NextApiRequest) {
const { id: dataId } = req.query as {
id: string;
};
if (!dataId) {
throw new Error('dataId is required');
Promise.reject(CommonErrEnum.missingParams);
}
// 凭证校验
const { teamId, datasetData } = await authDatasetData({
const { datasetData } = await authDatasetData({
req,
authToken: true,
authApiKey: true,
dataId,
per: 'w'
per: WritePermissionVal
});
await deleteDatasetData(datasetData);
jsonRes(res, {
data: 'success'
});
return 'success';
}
export default NextAPI(handler);

View File

@@ -1,8 +1,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authDatasetData } from '@/service/support/permission/auth/dataset';
import type { NextApiRequest } from 'next';
import { NextAPI } from '@/service/middleware/entry';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { authDatasetData } from '@fastgpt/service/support/permission/dataset/auth';
export type Response = {
id: string;
@@ -11,7 +10,7 @@ export type Response = {
source: string;
};
async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
async function handler(req: NextApiRequest) {
const { id: dataId } = req.query as {
id: string;
};
@@ -22,12 +21,10 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
authToken: true,
authApiKey: true,
dataId,
per: 'r'
per: ReadPermissionVal
});
jsonRes(res, {
data: datasetData
});
return datasetData;
}
export default NextAPI(handler);

View File

@@ -2,30 +2,30 @@
insert one data to dataset (immediately insert)
manual input or mark data
*/
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { NextApiRequest } from 'next';
import { countPromptTokens } from '@fastgpt/service/common/string/tiktoken/index';
import { getVectorModel } from '@fastgpt/service/core/ai/model';
import { hasSameValue } from '@/service/core/dataset/data/utils';
import { insertData2Dataset } from '@/service/core/dataset/data/controller';
import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
import { getCollectionWithDataset } from '@fastgpt/service/core/dataset/controller';
import { pushGenerateVectorUsage } from '@/service/support/wallet/usage/push';
import { InsertOneDatasetDataProps } from '@/global/core/dataset/api';
import { simpleText } from '@fastgpt/global/common/string/tools';
import { checkDatasetLimit } from '@fastgpt/service/support/permission/teamLimit';
import { NextAPI } from '@/service/middleware/entry';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
async function handler(req: NextApiRequest) {
const { collectionId, q, a, indexes } = req.body as InsertOneDatasetDataProps;
if (!q) {
throw new Error('q is required');
Promise.reject(CommonErrEnum.missingParams);
}
if (!collectionId) {
throw new Error('collectionId is required');
Promise.reject(CommonErrEnum.missingParams);
}
// 凭证校验
@@ -34,7 +34,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
authToken: true,
authApiKey: true,
collectionId,
per: 'w'
per: WritePermissionVal
});
await checkDatasetLimit({
@@ -93,9 +93,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
model: vectorModelData.model
});
jsonRes<string>(res, {
data: insertId
});
return insertId;
}
export default NextAPI(handler);

View File

@@ -1,15 +1,12 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { DatasetDataListItemType } from '@/global/core/dataset/type.d';
import type { NextApiRequest } from 'next';
import type { GetDatasetDataListProps } from '@/global/core/api/datasetReq';
import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
import { PagingData } from '@/types';
import { replaceRegChars } from '@fastgpt/global/common/string/tools';
import { NextAPI } from '@/service/middleware/entry';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
async function handler(req: NextApiRequest) {
let {
pageNum = 1,
pageSize = 10,
@@ -25,7 +22,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
authToken: true,
authApiKey: true,
collectionId,
per: 'r'
per: ReadPermissionVal
});
searchText = replaceRegChars(searchText).replace(/'/g, '');
@@ -50,14 +47,12 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
MongoDatasetData.countDocuments(match)
]);
jsonRes<PagingData<DatasetDataListItemType>>(res, {
data: {
return {
pageNum,
pageSize,
data,
total
}
});
};
}
export default NextAPI(handler);

View File

@@ -5,11 +5,12 @@ import type {
PushDatasetDataProps,
PushDatasetDataResponse
} from '@fastgpt/global/core/dataset/api.d';
import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
import { checkDatasetLimit } from '@fastgpt/service/support/permission/teamLimit';
import { predictDataLimitLength } from '@fastgpt/global/core/dataset/utils';
import { pushDataListToTrainingQueue } from '@fastgpt/service/core/dataset/training/controller';
import { NextAPI } from '@/service/middleware/entry';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
const body = req.body as PushDatasetDataProps;
@@ -29,7 +30,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
authToken: true,
authApiKey: true,
collectionId,
per: 'w'
per: WritePermissionVal
});
// auth dataset limit

View File

@@ -1,14 +1,13 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { NextApiRequest } from 'next';
import { updateData2Dataset } from '@/service/core/dataset/data/controller';
import { authDatasetData } from '@/service/support/permission/auth/dataset';
import { pushGenerateVectorUsage } from '@/service/support/wallet/usage/push';
import { UpdateDatasetDataProps } from '@/global/core/dataset/api';
import { checkDatasetLimit } from '@fastgpt/service/support/permission/teamLimit';
import { NextAPI } from '@/service/middleware/entry';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { authDatasetData } from '@fastgpt/service/support/permission/dataset/auth';
async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
async function handler(req: NextApiRequest) {
const { id, q = '', a, indexes = [] } = req.body as UpdateDatasetDataProps;
// auth data permission
@@ -23,7 +22,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
authToken: true,
authApiKey: true,
dataId: id,
per: 'w'
per: WritePermissionVal
});
// auth team balance
@@ -46,8 +45,6 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
tokens,
model: vectorModel
});
jsonRes(res);
}
export default NextAPI(handler);

View File

@@ -1,21 +1,20 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import type { NextApiRequest } from 'next';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { delDatasetRelevantData } from '@fastgpt/service/core/dataset/controller';
import { findDatasetAndAllChildren } from '@fastgpt/service/core/dataset/controller';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
import { NextAPI } from '@/service/middleware/entry';
import { OwnerPermissionVal } from '@fastgpt/global/support/permission/constant';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
async function handler(req: NextApiRequest) {
const { id: datasetId } = req.query as {
id: string;
};
if (!datasetId) {
throw new Error('缺少参数');
return Promise.reject(CommonErrEnum.missingParams);
}
// auth owner
@@ -24,7 +23,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
authToken: true,
authApiKey: true,
datasetId,
per: 'owner'
per: OwnerPermissionVal
});
const datasets = await findDatasetAndAllChildren({
@@ -43,12 +42,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
{ session }
);
});
}
jsonRes(res);
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
}
export default NextAPI(handler);

View File

@@ -1,43 +1,39 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
import type { DatasetItemType } from '@fastgpt/global/core/dataset/type.d';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { NextAPI } from '@/service/middleware/entry';
import { DatasetItemType } from '@fastgpt/global/core/dataset/type';
import { ApiRequestProps } from '@fastgpt/service/type/next';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
type Query = {
id: string;
};
async function handler(req: ApiRequestProps<Query>): Promise<DatasetItemType> {
const { id: datasetId } = req.query as {
id: string;
};
if (!datasetId) {
throw new Error('缺少参数');
return Promise.reject(CommonErrEnum.missingParams);
}
// 凭证校验
const { dataset, canWrite, isOwner } = await authDataset({
const { dataset, permission } = await authDataset({
req,
authToken: true,
authApiKey: true,
datasetId,
per: 'r'
per: ReadPermissionVal
});
jsonRes<DatasetItemType>(res, {
data: {
return {
...dataset,
permission,
vectorModel: getVectorModel(dataset.vectorModel),
agentModel: getLLMModel(dataset.agentModel),
canWrite,
isOwner
}
});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
agentModel: getLLMModel(dataset.agentModel)
};
}
export default NextAPI(handler);

View File

@@ -1,7 +1,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { responseWriteController } from '@fastgpt/service/common/response';
import { addLog } from '@fastgpt/service/common/system/log';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
import { findDatasetAndAllChildren } from '@fastgpt/service/core/dataset/controller';
import {
@@ -9,6 +9,8 @@ import {
updateExportDatasetLimit
} from '@fastgpt/service/support/user/utils';
import { NextAPI } from '@/service/middleware/entry';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
let { datasetId } = req.query as {
@@ -16,11 +18,16 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
};
if (!datasetId || !global.pgClient) {
throw new Error('缺少参数');
return Promise.reject(CommonErrEnum.missingParams);
}
// 凭证校验
const { teamId } = await authDataset({ req, authToken: true, datasetId, per: 'w' });
const { teamId } = await authDataset({
req,
authToken: true,
datasetId,
per: WritePermissionVal
});
await checkExportDatasetLimit({
teamId,

View File

@@ -1,24 +1,14 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authDatasetFile } from '@fastgpt/service/support/permission/auth/dataset';
import { DatasetFileSchema } from '@fastgpt/global/core/dataset/type.d';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
import type { NextApiRequest } from 'next';
import { authDatasetFile } from '@fastgpt/service/support/permission/dataset/auth';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { NextAPI } from '@/service/middleware/entry';
async function handler(req: NextApiRequest) {
const { fileId } = req.query as { fileId: string };
// 凭证校验
const { file } = await authDatasetFile({ req, authToken: true, fileId, per: 'r' });
const { file } = await authDatasetFile({ req, authToken: true, fileId, per: ReadPermissionVal });
jsonRes<DatasetFileSchema>(res, {
data: file
});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
return file;
}
export default NextAPI(handler);

View File

@@ -1,10 +1,10 @@
import type { NextApiResponse } from 'next';
import { authFile } from '@fastgpt/service/support/permission/auth/file';
import { authDatasetFile } from '@fastgpt/service/support/permission/dataset/auth';
import { DatasetSourceReadTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { rawText2Chunks, readDatasetSourceRawText } from '@fastgpt/service/core/dataset/read';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { NextAPI } from '@/service/middleware/entry';
import { ApiRequestProps } from '@fastgpt/service/type/next';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
export type PostPreviewFilesChunksProps = {
type: DatasetSourceReadTypeEnum;
@@ -21,8 +21,7 @@ export type PreviewChunksResponse = {
}[];
async function handler(
req: ApiRequestProps<PostPreviewFilesChunksProps>,
res: NextApiResponse<any>
req: ApiRequestProps<PostPreviewFilesChunksProps>
): Promise<PreviewChunksResponse> {
const { type, sourceId, chunkSize, customSplitChar, overlapRatio, selector, isQAImport } =
req.body;
@@ -36,7 +35,13 @@ async function handler(
const { teamId } = await (async () => {
if (type === DatasetSourceReadTypeEnum.fileLocal) {
return authFile({ req, authToken: true, authApiKey: true, fileId: sourceId });
return authDatasetFile({
req,
authToken: true,
authApiKey: true,
fileId: sourceId,
per: ReadPermissionVal
});
}
return authCert({ req, authApiKey: true, authToken: true });
})();

View File

@@ -1,34 +1,68 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import type { NextApiRequest } from 'next';
import type { DatasetListItemType } from '@fastgpt/global/core/dataset/type.d';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import { mongoRPermission } from '@fastgpt/global/support/permission/utils';
import { authUserPer } from '@fastgpt/service/support/permission/user/auth';
import { getVectorModel } from '@fastgpt/service/core/ai/model';
import { NextAPI } from '@/service/middleware/entry';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { DatasetPermission } from '@fastgpt/global/support/permission/dataset/controller';
import {
PerResourceTypeEnum,
ReadPermissionVal
} from '@fastgpt/global/support/permission/constant';
import { MongoResourcePermission } from '@fastgpt/service/support/permission/schema';
import { parseParentIdInMongo } from '@fastgpt/global/common/parentFolder/utils';
async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
async function handler(req: NextApiRequest) {
const { parentId, type } = req.query as { parentId?: string; type?: DatasetTypeEnum };
// 凭证校验
const { teamId, tmbId, permission } = await authUserPer({
const {
teamId,
tmbId,
permission: tmbPer
} = await authUserPer({
req,
authToken: true,
authApiKey: true,
per: ReadPermissionVal
});
const datasets = await MongoDataset.find({
...mongoRPermission({ teamId, tmbId, permission }),
...(parentId !== undefined && { parentId: parentId || null }),
const [myDatasets, rpList] = await Promise.all([
MongoDataset.find({
teamId,
...parseParentIdInMongo(parentId),
...(type && { type })
})
.sort({ updateTime: -1 })
.lean();
.sort({
updateTime: -1
})
.lean(),
MongoResourcePermission.find({
resourceType: PerResourceTypeEnum.dataset,
teamId,
tmbId
}).lean()
]);
const filterDatasets = myDatasets
.map((dataset) => {
const perVal = rpList.find(
(item) => String(item.resourceId) === String(dataset._id)
)?.permission;
const Per = new DatasetPermission({
per: perVal ?? dataset.defaultPermission,
isOwner: String(dataset.tmbId) === tmbId || tmbPer.isOwner
});
return {
...dataset,
permission: Per
};
})
.filter((app) => app.permission.hasReadPer);
const data = await Promise.all(
datasets.map<DatasetListItemType>((item) => ({
filterDatasets.map<DatasetListItemType>((item) => ({
_id: item._id,
parentId: item.parentId,
avatar: item.avatar,
@@ -36,15 +70,12 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
intro: item.intro,
type: item.type,
permission: item.permission,
canWrite: permission.hasWritePer,
isOwner: permission.isOwner || String(item.tmbId) === tmbId,
vectorModel: getVectorModel(item.vectorModel)
vectorModel: getVectorModel(item.vectorModel),
defaultPermission: item.defaultPermission
}))
);
jsonRes<DatasetListItemType[]>(res, {
data
});
return data;
}
export default NextAPI(handler);

View File

@@ -1,33 +1,20 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { NextApiRequest } from 'next';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import type { ParentTreePathItemType } from '@fastgpt/global/common/parentFolder/type.d';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { NextAPI } from '@/service/middleware/entry';
async function handler(req: NextApiRequest) {
const { parentId } = req.query as { parentId: string };
if (!parentId) {
return jsonRes(res, {
data: []
});
return [];
}
await authDataset({ req, authToken: true, datasetId: parentId, per: 'r' });
await authDataset({ req, authToken: true, datasetId: parentId, per: ReadPermissionVal });
jsonRes<ParentTreePathItemType[]>(res, {
data: await getParents(parentId)
});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
return await getParents(parentId);
}
async function getParents(parentId?: string): Promise<ParentTreePathItemType[]> {
@@ -44,3 +31,5 @@ async function getParents(parentId?: string): Promise<ParentTreePathItemType[]>
return paths;
}
export default NextAPI(handler);

View File

@@ -1,7 +1,6 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import type { SearchTestProps, SearchTestResponse } from '@/global/core/dataset/api.d';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import type { NextApiRequest } from 'next';
import type { SearchTestProps } from '@/global/core/dataset/api.d';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { pushGenerateVectorUsage } from '@/service/support/wallet/usage/push';
import { searchDatasetData } from '@fastgpt/service/core/dataset/search/controller';
import { updateApiKeyUsage } from '@fastgpt/service/support/openapi/tools';
@@ -13,8 +12,10 @@ import {
checkTeamReRankPermission
} from '@fastgpt/service/support/permission/teamLimit';
import { NextAPI } from '@/service/middleware/entry';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
async function handler(req: NextApiRequest) {
const {
datasetId,
text,
@@ -29,8 +30,9 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
} = req.body as SearchTestProps;
if (!datasetId || !text) {
throw new Error('缺少参数');
return Promise.reject(CommonErrEnum.missingParams);
}
const start = Date.now();
// auth dataset role
@@ -39,7 +41,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
authToken: true,
authApiKey: true,
datasetId,
per: 'r'
per: ReadPermissionVal
});
// auth balance
await checkTeamAIPoints(teamId);
@@ -88,14 +90,12 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
});
}
jsonRes<SearchTestResponse>(res, {
data: {
return {
list: searchRes,
duration: `${((Date.now() - start) / 1000).toFixed(3)}s`,
queryExtensionModel: aiExtensionResult?.model,
...result
}
});
};
}
export default NextAPI(handler);

View File

@@ -1,10 +1,9 @@
import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
import type { ApiRequestProps } from '@fastgpt/service/type/next';
import { NextAPI } from '@/service/middleware/entry';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/schema';
type Props = {};
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
export type getDatasetTrainingQueueResponse = {
rebuildingCount: number;
@@ -12,8 +11,7 @@ export type getDatasetTrainingQueueResponse = {
};
async function handler(
req: ApiRequestProps<any, { datasetId: string }>,
res: ApiResponseType<any>
req: ApiRequestProps<any, { datasetId: string }>
): Promise<getDatasetTrainingQueueResponse> {
const { datasetId } = req.query;
@@ -22,7 +20,7 @@ async function handler(
authToken: true,
authApiKey: true,
datasetId,
per: 'r'
per: ReadPermissionVal
});
const [rebuildingCount, trainingCount] = await Promise.all([

View File

@@ -1,13 +1,10 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { NextApiRequest } from 'next';
import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/schema';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { GetTrainingQueueProps } from '@/global/core/dataset/api';
import { NextAPI } from '@/service/middleware/entry';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
async function handler(req: NextApiRequest) {
await authCert({ req, authToken: true });
const { vectorModel, agentModel } = req.query as GetTrainingQueueProps;
@@ -31,16 +28,10 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const vectorTrainingCount = data.find((item) => item._id === vectorModel)?.count || 0;
const agentTrainingCount = data.find((item) => item._id === agentModel)?.count || 0;
jsonRes(res, {
data: {
return {
vectorTrainingCount,
agentTrainingCount
};
}
});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
}
export default NextAPI(handler);

View File

@@ -1,5 +1,5 @@
import { NextAPI } from '@/service/middleware/entry';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
@@ -8,7 +8,8 @@ import { createTrainingUsage } from '@fastgpt/service/support/wallet/usage/contr
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
import { TrainingModeEnum } from '@fastgpt/global/core/dataset/constants';
import { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
import { ApiRequestProps } from '@fastgpt/service/type/next';
import { OwnerPermissionVal } from '@fastgpt/global/support/permission/constant';
export type rebuildEmbeddingBody = {
datasetId: string;
@@ -17,10 +18,7 @@ export type rebuildEmbeddingBody = {
export type Response = {};
async function handler(
req: ApiRequestProps<rebuildEmbeddingBody>,
res: ApiResponseType<any>
): Promise<Response> {
async function handler(req: ApiRequestProps<rebuildEmbeddingBody>): Promise<Response> {
const { datasetId, vectorModel } = req.body;
const { teamId, tmbId, dataset } = await authDataset({
@@ -28,7 +26,7 @@ async function handler(
authToken: true,
authApiKey: true,
datasetId,
per: 'owner'
per: OwnerPermissionVal
});
// check vector model

View File

@@ -1,36 +1,40 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { NextApiRequest } from 'next';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import type { DatasetUpdateBody } from '@fastgpt/global/core/dataset/api.d';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { NextAPI } from '@/service/middleware/entry';
import {
OwnerPermissionVal,
WritePermissionVal
} from '@fastgpt/global/support/permission/constant';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
async function handler(req: NextApiRequest) {
const {
id,
parentId,
name,
avatar,
intro,
permission,
agentModel,
websiteConfig,
externalReadUrl,
defaultPermission,
status
} = req.body as DatasetUpdateBody;
if (!id) {
throw new Error('缺少参数');
return Promise.reject(CommonErrEnum.missingParams);
}
if (permission) {
await authDataset({ req, authToken: true, datasetId: id, per: 'owner' });
if (defaultPermission) {
await authDataset({ req, authToken: true, datasetId: id, per: OwnerPermissionVal });
} else {
await authDataset({ req, authToken: true, datasetId: id, per: 'w' });
await authDataset({ req, authToken: true, datasetId: id, per: WritePermissionVal });
}
console.log('update dataset', req.body);
await MongoDataset.findOneAndUpdate(
{
_id: id
@@ -39,20 +43,14 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
...(parentId !== undefined && { parentId: parentId || null }),
...(name && { name }),
...(avatar && { avatar }),
...(permission && { permission }),
...(agentModel && { agentModel: agentModel.model }),
...(websiteConfig && { websiteConfig }),
...(status && { status }),
...(intro && { intro }),
...(externalReadUrl && { externalReadUrl })
...(externalReadUrl && { externalReadUrl }),
defaultPermission
}
);
}
jsonRes(res);
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
}
export default NextAPI(handler);

View File

@@ -1,9 +1,10 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import type { NextApiRequest } from 'next';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { checkExportDatasetLimit } from '@fastgpt/service/support/user/utils';
import { NextAPI } from '@/service/middleware/entry';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
async function handler(req: NextApiRequest) {
const { datasetId } = req.query as {
datasetId: string;
};
@@ -13,7 +14,12 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
}
// 凭证校验
const { teamId } = await authDataset({ req, authToken: true, datasetId, per: 'w' });
const { teamId } = await authDataset({
req,
authToken: true,
datasetId,
per: WritePermissionVal
});
await checkExportDatasetLimit({
teamId,

View File

@@ -1,15 +1,13 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
import { CreateTrainingUsageProps } from '@fastgpt/global/support/wallet/usage/api.d';
import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
import { createTrainingUsage } from '@fastgpt/service/support/wallet/usage/controller';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { NextAPI } from '@/service/middleware/entry';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
async function handler(req: NextApiRequest) {
const { name, datasetId } = req.body as CreateTrainingUsageProps;
const { teamId, tmbId, dataset } = await authDataset({
@@ -17,7 +15,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
authToken: true,
authApiKey: true,
datasetId,
per: 'w'
per: WritePermissionVal
});
const { billId } = await createTrainingUsage({
@@ -29,13 +27,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
agentModel: getLLMModel(dataset.agentModel).name
});
jsonRes<string>(res, {
data: billId
});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
return billId;
}
export default NextAPI(handler);

View File

@@ -14,7 +14,6 @@ import { AppUpdateParams } from '@/global/core/app/api';
import dynamic from 'next/dynamic';
import { useI18n } from '@/web/context/I18n';
import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
const MoveModal = dynamic(() => import('@/components/common/folder/MoveModal'));
type AppListContextType = {

View File

@@ -36,7 +36,7 @@ const EditFolderModal = ({
if (!val) return Promise.resolve('');
return editCallback(val);
},
onSuccess: (res) => {
onSuccess: () => {
onClose();
}
});

View File

@@ -0,0 +1,46 @@
import { Box, Button, Flex } from '@chakra-ui/react';
import React from 'react';
import CollaboratorContextProvider, {
MemberManagerInputPropsType
} from '@/components/support/permission/MemberManager/context';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { useTranslation } from 'next-i18next';
/**
 * Collaborator management panel for a dataset's settings page.
 *
 * Thin wrapper around `CollaboratorContextProvider`: forwards the caller's
 * permission-manager config (`managePer`) to the provider and renders, via its
 * render prop, a "Manage" button, an "Add" button, and the current member list.
 *
 * @param managePer - Config consumed by `CollaboratorContextProvider`
 *   (current permission, permission list, and the fetch/update/delete
 *   collaborator callbacks — see `MemberManagerInputPropsType`).
 */
function MemberManager({ managePer }: { managePer: MemberManagerInputPropsType }) {
  const { t } = useTranslation();
  return (
    <Box mt={4}>
      {/* Provider owns the collaborator state/modals; UI comes from its render prop. */}
      <CollaboratorContextProvider {...managePer}>
        {({ MemberListCard, onOpenManageModal, onOpenAddMember }) => {
          return (
            <>
              <Flex alignItems="center" flexDirection="row" justifyContent="space-between" w="full">
                <Flex flexDirection="row" gap="2">
                  {/* Opens the manage-collaborators modal (edit/remove existing members). */}
                  <Button
                    size="sm"
                    variant="whitePrimary"
                    leftIcon={<MyIcon w="4" name="common/settingLight" />}
                    onClick={onOpenManageModal}
                  >
                    {t('permission.Manage')}
                  </Button>
                  {/* Opens the add-member modal to grant access to new collaborators. */}
                  <Button
                    size="sm"
                    variant="whitePrimary"
                    leftIcon={<MyIcon w="4" name="support/permission/collaborator" />}
                    onClick={onOpenAddMember}
                  >
                    {t('common.Add')}
                  </Button>
                </Flex>
              </Flex>
              {/* Read-only card listing current collaborators and their permissions. */}
              <MemberListCard mt={2} p={1.5} bg="myGray.100" borderRadius="md" />
            </>
          );
        }}
      </CollaboratorContextProvider>
    </Box>
  );
}
export default MemberManager;

View File

@@ -26,8 +26,6 @@ import EditFolderModal, { useEditFolder } from '../../../component/EditFolderMod
import { TabEnum } from '../../index';
import ParentPath from '@/components/common/ParentPaths';
import dynamic from 'next/dynamic';
import { useUserStore } from '@/web/support/user/useUserStore';
import { TeamMemberRoleEnum } from '@fastgpt/global/support/user/team/constant';
import { ImportDataSourceEnum } from '@fastgpt/global/core/dataset/constants';
import { useContextSelector } from 'use-context-selector';
@@ -40,7 +38,6 @@ const Header = ({}: {}) => {
const { t } = useTranslation();
const theme = useTheme();
const { setLoading } = useSystemStore();
const { userInfo } = useUserStore();
const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail);
const router = useRouter();
@@ -189,7 +186,7 @@ const Header = ({}: {}) => {
)}
{/* diff collection button */}
{userInfo?.team?.role !== TeamMemberRoleEnum.visitor && (
{datasetDetail.permission.hasWritePer && (
<>
{datasetDetail?.type === DatasetTypeEnum.dataset && (
<MyMenu

View File

@@ -37,8 +37,6 @@ import { useDrag } from '@/web/common/hooks/useDrag';
import SelectCollections from '@/web/core/dataset/components/SelectCollections';
import { useToast } from '@fastgpt/web/hooks/useToast';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import { useUserStore } from '@/web/support/user/useUserStore';
import { TeamMemberRoleEnum } from '@fastgpt/global/support/user/team/constant';
import { DatasetCollectionSyncResultEnum } from '@fastgpt/global/core/dataset/constants';
import MyBox from '@fastgpt/web/components/common/MyBox';
import { useContextSelector } from 'use-context-selector';
@@ -53,7 +51,6 @@ const CollectionCard = () => {
const router = useRouter();
const { toast } = useToast();
const { t } = useTranslation();
const { userInfo } = useUserStore();
const { datasetDetail, loadDatasetDetail } = useContextSelector(DatasetPageContext, (v) => v);
const { openConfirm: openDeleteConfirm, ConfirmModal: ConfirmDeleteModal } = useConfirm({
@@ -213,7 +210,7 @@ const CollectionCard = () => {
}
bg={dragTargetId === collection._id ? 'primary.100' : ''}
userSelect={'none'}
onDragStart={(e) => {
onDragStart={() => {
setDragStartId(collection._id);
}}
onDragOver={(e) => {
@@ -296,7 +293,7 @@ const CollectionCard = () => {
</Box>
</Td>
<Td onClick={(e) => e.stopPropagation()}>
{collection.canWrite && userInfo?.team?.role !== TeamMemberRoleEnum.visitor && (
{collection.permission.hasWritePer && (
<MyMenu
width={100}
offset={[-70, 5]}

View File

@@ -35,8 +35,6 @@ import InputDataModal from '../components/InputDataModal';
import RawSourceBox from '@/components/core/dataset/RawSourceBox';
import type { DatasetDataListItemType } from '@/global/core/dataset/type.d';
import { TabEnum } from '..';
import { useUserStore } from '@/web/support/user/useUserStore';
import { TeamMemberRoleEnum } from '@fastgpt/global/support/user/team/constant';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { DatasetCollectionTypeMap, TrainingTypeMap } from '@fastgpt/global/core/dataset/constants';
import { formatTime2YMDHM } from '@fastgpt/global/common/string/time';
@@ -47,18 +45,21 @@ import { usePagination } from '@fastgpt/web/hooks/usePagination';
import { getCollectionSourceData } from '@fastgpt/global/core/dataset/collection/utils';
import { useI18n } from '@/web/context/I18n';
import EmptyTip from '@fastgpt/web/components/common/EmptyTip';
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';
import { useContextSelector } from 'use-context-selector';
const DataCard = () => {
const BoxRef = useRef<HTMLDivElement>(null);
const theme = useTheme();
const lastSearch = useRef('');
const router = useRouter();
const { userInfo } = useUserStore();
const { isPc } = useSystemStore();
const { collectionId = '', datasetId } = router.query as {
collectionId: string;
datasetId: string;
};
const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail);
const { Loading, setIsLoading } = useLoading({ defaultLoading: true });
const { t } = useTranslation();
const { datasetT } = useI18n();
@@ -101,7 +102,7 @@ const DataCard = () => {
getData(1);
lastSearch.current = searchText;
}, 300),
[]
[searchText]
);
// get file info
@@ -119,10 +120,7 @@ const DataCard = () => {
}
);
const canWrite = useMemo(
() => userInfo?.team?.role !== TeamMemberRoleEnum.visitor && !!collection?.canWrite,
[collection?.canWrite, userInfo?.team?.role]
);
const canWrite = useMemo(() => datasetDetail.permission.hasWritePer, [datasetDetail]);
const metadataList = useMemo(() => {
if (!collection) return [];
@@ -291,7 +289,7 @@ const DataCard = () => {
gridTemplateColumns={['1fr', 'repeat(2,1fr)', 'repeat(3,1fr)', 'repeat(4,1fr)']}
gridGap={4}
>
{datasetDataList.map((item, index) => (
{datasetDataList.map((item) => (
<Card
key={item._id}
cursor={'pointer'}

View File

@@ -1,4 +1,4 @@
import React, { useState, useMemo } from 'react';
import React from 'react';
import { useRouter } from 'next/router';
import { Box, Flex, Button, IconButton, Input, Textarea, HStack } from '@chakra-ui/react';
import { DeleteIcon } from '@chakra-ui/icons';
@@ -11,7 +11,6 @@ import type { DatasetItemType } from '@fastgpt/global/core/dataset/type.d';
import Avatar from '@/components/Avatar';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import { useTranslation } from 'next-i18next';
import PermissionRadio from '@/components/support/permission/Radio';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { useRequest } from '@fastgpt/web/hooks/useRequest';
import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants';
@@ -25,10 +24,21 @@ import MyDivider from '@fastgpt/web/components/common/MyDivider/index';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import QuestionTip from '@fastgpt/web/components/common/MyTooltip/QuestionTip';
import FormLabel from '@fastgpt/web/components/common/MyBox/FormLabel';
import DefaultPermissionList from '@/components/support/permission/DefaultPerList';
import {
DatasetDefaultPermission,
DatasetPermissionList
} from '@fastgpt/global/support/permission/dataset/constant';
import MemberManager from '../../component/MemberManager';
import {
getCollaboratorList,
postUpdateDatasetCollaborators,
deleteDatasetCollaborators
} from '@/web/core/dataset/api/collaborator';
const Info = ({ datasetId }: { datasetId: string }) => {
const { t } = useTranslation();
const { datasetT } = useI18n();
const { datasetT, commonT } = useI18n();
const { datasetDetail, loadDatasetDetail, updateDataset, rebuildingCount, trainingCount } =
useContextSelector(DatasetPageContext, (v) => v);
@@ -44,7 +54,7 @@ const Info = ({ datasetId }: { datasetId: string }) => {
const avatar = watch('avatar');
const vectorModel = watch('vectorModel');
const agentModel = watch('agentModel');
const permission = watch('permission');
const defaultPermission = watch('defaultPermission');
const { datasetModelList, vectorModelList } = useSystemStore();
@@ -233,20 +243,46 @@ const Info = ({ datasetId }: { datasetId: string }) => {
<FormLabel flex={['0 0 90px', '0 0 160px']}>{t('common.Intro')}</FormLabel>
<Textarea flex={[1, '0 0 320px']} {...register('intro')} placeholder={t('common.Intro')} />
</Flex>
{datasetDetail.isOwner && (
<Flex mt={5} alignItems={'center'} w={'100%'} flexWrap={'wrap'}>
{datasetDetail.permission.hasManagePer && (
<>
<Flex mt={5} alignItems={'center'} w={'100%'} flexWrap={'wrap'} maxW="500px">
<FormLabel flex={['0 0 90px', '0 0 160px']} w={0}>
{t('user.Permission')}
{commonT('permission.Default permission')}
</FormLabel>
<Box>
<PermissionRadio
value={permission}
onChange={(e) => {
setValue('permission', e);
<DefaultPermissionList
w="320px"
per={defaultPermission}
defaultPer={DatasetDefaultPermission}
onChange={(v) => setValue('defaultPermission', v)}
/>
</Flex>
<Flex mt={5} alignItems={'center'} w={'100%'} flexWrap={'wrap'} maxW="500px">
<FormLabel flex={['0 0 90px', '0 0 160px']} w={0}>
{commonT('permission.Collaborator')}
</FormLabel>
<Box flex={1}>
<MemberManager
managePer={{
permission: datasetDetail.permission,
onGetCollaboratorList: () => getCollaboratorList(datasetId),
permissionList: DatasetPermissionList,
onUpdateCollaborators: (body) =>
postUpdateDatasetCollaborators({
...body,
datasetId
}),
onDelOneCollaborator: (tmbId) =>
deleteDatasetCollaborators({
datasetId,
tmbId
})
}}
/>
</Box>
</Flex>
</>
)}
<Flex mt={5} w={'100%'} alignItems={'flex-end'}>
@@ -259,7 +295,7 @@ const Info = ({ datasetId }: { datasetId: string }) => {
>
{t('common.Save')}
</Button>
{datasetDetail.isOwner && (
{datasetDetail.permission.isOwner && (
<IconButton
isLoading={btnLoading}
icon={<DeleteIcon />}

View File

@@ -365,9 +365,11 @@ const InputDataModal = ({
<Button variant={'whiteBase'} mr={3} onClick={onClose}>
{t('common.Close')}
</Button>
<MyTooltip label={collection.canWrite ? '' : t('dataset.data.Can not edit')}>
<MyTooltip
label={collection.permission.hasWritePer ? '' : t('dataset.data.Can not edit')}
>
<Button
isDisabled={!collection.canWrite}
isDisabled={!collection.permission.hasWritePer}
// @ts-ignore
onClick={handleSubmit(dataId ? onUpdateData : sureImportData)}
>

View File

@@ -1,7 +1,5 @@
import React, { useCallback } from 'react';
import { useTranslation } from 'next-i18next';
import { useDatasetStore } from '@/web/core/dataset/store/dataset';
import { useUserStore } from '@/web/support/user/useUserStore';
import { Box, Flex, IconButton, useTheme, Progress } from '@chakra-ui/react';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import Avatar from '@/components/Avatar';
@@ -29,7 +27,6 @@ const Slider = ({ currentTab }: { currentTab: TabEnum }) => {
const { datasetT } = useI18n();
const router = useRouter();
const query = router.query;
const { userInfo } = useUserStore();
const { isPc } = useSystemStore();
const { datasetDetail, vectorTrainingMap, agentTrainingMap, rebuildingCount } =
useContextSelector(DatasetPageContext, (v) => v);
@@ -41,7 +38,7 @@ const Slider = ({ currentTab }: { currentTab: TabEnum }) => {
icon: 'common/overviewLight'
},
{ label: t('core.dataset.test.Search Test'), id: TabEnum.test, icon: 'kbTest' },
...(userInfo?.team.permission.hasManagePer || datasetDetail.isOwner
...(datasetDetail.permission.hasManagePer
? [{ label: t('common.Config'), id: TabEnum.info, icon: 'common/settingLight' }]
: [])
];

View File

@@ -1,4 +1,4 @@
import React, { useCallback, useState } from 'react';
import React, { useCallback } from 'react';
import { Box, Flex, Button, ModalFooter, ModalBody, Input } from '@chakra-ui/react';
import { useSelectFile } from '@/web/common/file/hooks/useSelectFile';
import { useForm } from 'react-hook-form';
@@ -20,6 +20,7 @@ import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants'
import AIModelSelector from '@/components/Select/AIModelSelector';
import { useI18n } from '@/web/context/I18n';
import QuestionTip from '@fastgpt/web/components/common/MyTooltip/QuestionTip';
import { DatasetDefaultPermission } from '@fastgpt/global/support/permission/dataset/constant';
const CreateModal = ({ onClose, parentId }: { onClose: () => void; parentId?: string }) => {
const { t } = useTranslation();
@@ -38,7 +39,8 @@ const CreateModal = ({ onClose, parentId }: { onClose: () => void; parentId?: st
name: '',
intro: '',
vectorModel: filterNotHiddenVectorModelList[0].model,
agentModel: datasetModelList[0].model
agentModel: datasetModelList[0].model,
defaultPermission: DatasetDefaultPermission
}
});
const avatar = watch('avatar');

View File

@@ -0,0 +1,496 @@
import { useDrag } from '@/web/common/hooks/useDrag';
import { delDatasetById, getDatasetById, putDatasetById } from '@/web/core/dataset/api';
import { useDatasetStore } from '@/web/core/dataset/store/dataset';
import { Box, Flex, Grid } from '@chakra-ui/react';
import { DatasetTypeEnum, DatasetTypeMap } from '@fastgpt/global/core/dataset/constants';
import MyMenu from '@fastgpt/web/components/common/MyMenu';
import MyIcon from '@fastgpt/web/components/common/Icon';
import React, { useMemo, useRef, useState } from 'react';
import { useRouter } from 'next/router';
import PermissionIconText from '@/components/support/permission/IconText';
import DatasetTypeTag from '@/components/core/dataset/DatasetTypeTag';
import Avatar from '@/components/Avatar';
import { useConfirm } from '@fastgpt/web/hooks/useConfirm';
import { useRequest } from '@fastgpt/web/hooks/useRequest';
import { DatasetItemType } from '@fastgpt/global/core/dataset/type';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { checkTeamExportDatasetLimit } from '@/web/support/user/team/api';
import { downloadFetch } from '@/web/common/system/utils';
import { useTranslation } from 'next-i18next';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import dynamic from 'next/dynamic';
import { EditResourceInfoFormType } from '@/components/common/Modal/EditResourceModal';
import { useContextSelector } from 'use-context-selector';
import { DatasetContext } from '../context';
import {
DatasetDefaultPermission,
DatasetPermissionList
} from '@fastgpt/global/support/permission/dataset/constant';
import ConfigPerModal from '@/components/support/permission/ConfigPerModal';
import {
deleteDatasetCollaborators,
getCollaboratorList,
postUpdateDatasetCollaborators
} from '@/web/core/dataset/api/collaborator';
import FolderSlideCard from '@/components/common/folder/SlideCard';
import { useQuery } from '@tanstack/react-query';
import EmptyTip from '@fastgpt/web/components/common/EmptyTip';
const MoveModal = dynamic(() => import('./MoveModal'), { ssr: false });
/**
 * Dataset list page body.
 *
 * Renders the dataset/folder grid with drag-to-move support, a per-item menu
 * (edit info / move / export / permission / delete), a folder side card on PC,
 * and the edit / move / permission-config modals.
 */
function List() {
  const { setLoading, isPc } = useSystemStore();
  const { toast } = useToast();
  const { t } = useTranslation();
  const { refetch } = useContextSelector(DatasetContext, (v) => v);
  // Index into myDatasets of the dataset whose permission modal is open;
  // undefined means the modal is closed.
  const [editPerDatasetIndex, setEditPerDatasetIndex] = useState<number>();
  const { myDatasets, loadMyDatasets, setMyDatasets } = useDatasetStore();

  const editPerDataset = useMemo(
    () => (editPerDatasetIndex !== undefined ? myDatasets[editPerDatasetIndex] : undefined),
    [editPerDatasetIndex, myDatasets]
  );

  const router = useRouter();
  const { parentId } = router.query as { parentId: string };

  // Detail of the folder currently being browsed (undefined at the root).
  // myDatasets is in the query key so the detail refreshes when the list changes.
  const { data: folderDetail, refetch: refetchFolderDetail } = useQuery(
    ['folderDetail', parentId, myDatasets],
    () => (parentId ? getDatasetById(parentId) : undefined)
  );

  // Export a dataset as CSV; checks the team export limit first.
  const { mutate: exportDataset } = useRequest({
    mutationFn: async (dataset: DatasetItemType) => {
      setLoading(true);
      await checkTeamExportDatasetLimit(dataset._id);
      await downloadFetch({
        url: `/api/core/dataset/exportAll?datasetId=${dataset._id}`,
        filename: `${dataset.name}.csv`
      });
    },
    onSuccess() {
      toast({
        status: 'success',
        title: t('core.dataset.Start export')
      });
    },
    onSettled() {
      setLoading(false);
    },
    errorToast: t('dataset.Export Dataset Limit Error')
  });

  // Delete a dataset by id and remove it from the local store on success.
  const { mutate: onclickDelDataset } = useRequest({
    mutationFn: async (id: string) => {
      setLoading(true);
      await delDatasetById(id);
      return id;
    },
    onSuccess(id: string) {
      setMyDatasets(myDatasets.filter((item) => item._id !== id));
    },
    onSettled() {
      setLoading(false);
    },
    successToast: t('common.Delete Success'),
    errorToast: t('dataset.Delete Dataset Error')
  });

  // NOTE(review): dynamic() inside the component body creates a fresh component
  // type on every render — consider hoisting to module scope.
  const EditResourceModal = dynamic(() => import('@/components/common/Modal/EditResourceModal'));
  // Dataset currently being edited in the info modal; undefined means closed.
  const [editedDataset, setEditedDataset] = useState<EditResourceInfoFormType>();

  // Confirmation copy per dataset type (folders get a distinct warning).
  const DeleteTipsMap = useRef({
    [DatasetTypeEnum.folder]: t('dataset.deleteFolderTips'),
    [DatasetTypeEnum.dataset]: t('core.dataset.Delete Confirm'),
    [DatasetTypeEnum.websiteDataset]: t('core.dataset.Delete Confirm'),
    [DatasetTypeEnum.externalFile]: t('core.dataset.Delete Confirm')
  });

  const { moveDataId, setMoveDataId, dragStartId, setDragStartId, dragTargetId, setDragTargetId } =
    useDrag();

  // Decorate each dataset with its type label/icon for rendering.
  const formatDatasets = useMemo(
    () =>
      myDatasets.map((item) => {
        return {
          ...item,
          label: DatasetTypeMap[item.type]?.label,
          icon: DatasetTypeMap[item.type]?.icon
        };
      }),
    [myDatasets]
  );

  const { openConfirm, ConfirmModal } = useConfirm({
    type: 'delete'
  });

  // Confirm-then-delete used by the folder side card.
  const onDeleteDataset = (id: string) => {
    openConfirm(
      () => onclickDelDataset(id),
      undefined,
      DeleteTipsMap.current[DatasetTypeEnum.dataset]
    )();
  };

  return (
    <>
      <Flex>
        {formatDatasets.length > 0 && (
          <Grid
            flexGrow={1}
            py={5}
            gridTemplateColumns={['1fr', 'repeat(2,1fr)', 'repeat(3,1fr)', 'repeat(4,1fr)']}
            gridGap={5}
            userSelect={'none'}
          >
            {formatDatasets.map((dataset, index) => (
              <MyTooltip
                key={dataset._id}
                label={
                  <Flex flexDirection={'column'} alignItems={'center'}>
                    <Box fontSize={'xs'} color={'myGray.500'}>
                      {dataset.type === DatasetTypeEnum.folder ? '打开文件夹' : '打开知识库'}
                    </Box>
                  </Flex>
                }
              >
                <Box
                  display={'flex'}
                  flexDirection={'column'}
                  py={3}
                  px={5}
                  cursor={'pointer'}
                  borderWidth={1.5}
                  borderColor={dragTargetId === dataset._id ? 'primary.600' : 'borderColor.low'}
                  bg={'white'}
                  borderRadius={'md'}
                  minH={'130px'}
                  position={'relative'}
                  data-drag-id={dataset.type === DatasetTypeEnum.folder ? dataset._id : undefined}
                  draggable
                  onDragStart={() => {
                    setDragStartId(dataset._id);
                  }}
                  onDragOver={(e) => {
                    e.preventDefault();
                    // data-drag-id is only present on folder cards, so a non-null
                    // targetId already implies a folder drop target.
                    const targetId = e.currentTarget.getAttribute('data-drag-id');
                    if (!targetId) return;
                    // NOTE(review): DatasetTypeEnum.folder is a constant truthy
                    // string, so this guard is a no-op; was `dataset.type ===
                    // DatasetTypeEnum.folder` intended?
                    DatasetTypeEnum.folder && setDragTargetId(targetId);
                  }}
                  onDragLeave={(e) => {
                    e.preventDefault();
                    setDragTargetId(undefined);
                  }}
                  onDrop={async (e) => {
                    e.preventDefault();
                    if (!dragTargetId || !dragStartId || dragTargetId === dragStartId) return;
                    // update parentId
                    try {
                      await putDatasetById({
                        id: dragStartId,
                        parentId: dragTargetId
                      });
                      refetch();
                    } catch (error) {}
                    setDragTargetId(undefined);
                  }}
                  _hover={{
                    borderColor: 'primary.300',
                    boxShadow: '1.5',
                    '& .delete': {
                      display: 'block'
                    },
                    '& .more': {
                      display: 'flex'
                    }
                  }}
                  onClick={() => {
                    // Folders navigate deeper in the list; datasets open detail.
                    if (dataset.type === DatasetTypeEnum.folder) {
                      router.push({
                        pathname: '/dataset/list',
                        query: {
                          parentId: dataset._id
                        }
                      });
                    } else {
                      router.push({
                        pathname: '/dataset/detail',
                        query: {
                          datasetId: dataset._id
                        }
                      });
                    }
                  }}
                >
                  {/* The "more" menu is only shown to users with write permission. */}
                  {dataset.permission.hasWritePer && (
                    <Box
                      className="more"
                      display="none"
                      position={'absolute'}
                      top={3}
                      right={3}
                      borderRadius={'md'}
                      _hover={{
                        color: 'primary.500',
                        '& .icon': {
                          bg: 'myGray.100'
                        }
                      }}
                      onClick={(e) => {
                        // Keep menu clicks from triggering card navigation.
                        e.stopPropagation();
                      }}
                    >
                      <MyMenu
                        width={120}
                        Button={
                          <Box w={'22px'} h={'22px'}>
                            <MyIcon
                              className="icon"
                              name={'more'}
                              h={'16px'}
                              w={'16px'}
                              px={1}
                              py={1}
                              borderRadius={'md'}
                              cursor={'pointer'}
                            />
                          </Box>
                        }
                        menuList={[
                          {
                            children: [
                              {
                                icon: 'edit',
                                label: '编辑信息',
                                onClick: () =>
                                  setEditedDataset({
                                    id: dataset._id,
                                    name: dataset.name,
                                    intro: dataset.intro,
                                    avatar: dataset.avatar
                                  })
                              },
                              {
                                icon: 'common/file/move',
                                label: t('Move'),
                                onClick: () => setMoveDataId(dataset._id)
                              },
                              {
                                icon: 'export',
                                label: t('Export'),
                                onClick: () => {
                                  exportDataset(dataset);
                                }
                              },
                              // Permission config requires manage permission.
                              ...(dataset.permission.hasManagePer
                                ? [
                                    {
                                      icon: 'support/team/key',
                                      label: t('permission.Permission'),
                                      onClick: () => setEditPerDatasetIndex(index)
                                    }
                                  ]
                                : [])
                            ]
                          },
                          // Delete also requires manage permission.
                          ...(dataset.permission.hasManagePer
                            ? [
                                {
                                  children: [
                                    {
                                      icon: 'delete',
                                      label: t('common.Delete'),
                                      type: 'danger' as 'danger',
                                      onClick: () => {
                                        openConfirm(
                                          () => onclickDelDataset(dataset._id),
                                          undefined,
                                          DeleteTipsMap.current[dataset.type]
                                        )();
                                      }
                                    }
                                  ]
                                }
                              ]
                            : [])
                        ]}
                      />
                    </Box>
                  )}
                  <Flex alignItems={'center'} h={'38px'}>
                    <Avatar src={dataset.avatar} borderRadius={'md'} w={'28px'} />
                    <Box mx={3} className="textEllipsis3">
                      {dataset.name}
                    </Box>
                  </Flex>
                  <Box
                    flex={1}
                    className={'textEllipsis3'}
                    py={1}
                    wordBreak={'break-all'}
                    fontSize={'xs'}
                    color={'myGray.500'}
                  >
                    {dataset.intro ||
                      (dataset.type === DatasetTypeEnum.folder
                        ? t('core.dataset.Folder placeholder')
                        : t('core.dataset.Intro Placeholder'))}
                  </Box>
                  <Flex alignItems={'center'} fontSize={'sm'}>
                    <Box flex={1}>
                      <PermissionIconText
                        defaultPermission={dataset.defaultPermission}
                        color={'myGray.600'}
                      />
                    </Box>
                    {dataset.type !== DatasetTypeEnum.folder && (
                      <DatasetTypeTag type={dataset.type} py={1} px={2} />
                    )}
                  </Flex>
                </Box>
              </MyTooltip>
            ))}
          </Grid>
        )}
        {myDatasets.length === 0 && (
          <EmptyTip pt={'35vh'} text={t('core.dataset.Empty Dataset Tips')} flexGrow="1"></EmptyTip>
        )}
        {/* Folder side card (edit / move / delete / permission) — PC only. */}
        {!!folderDetail && isPc && (
          <Box pt={[4, 6]} ml={[4, 6]}>
            <FolderSlideCard
              refreshDeps={[folderDetail._id]}
              name={folderDetail.name}
              intro={folderDetail.intro}
              onEdit={() => {
                setEditedDataset({
                  id: folderDetail._id,
                  name: folderDetail.name,
                  intro: folderDetail.intro
                });
              }}
              onMove={() => setMoveDataId(folderDetail._id)}
              deleteTip={t('dataset.deleteFolderTips')}
              onDelete={() => onDeleteDataset(folderDetail._id)}
              defaultPer={{
                value: folderDetail.defaultPermission,
                defaultValue: DatasetDefaultPermission,
                onChange: (e) => {
                  return putDatasetById({
                    id: folderDetail._id,
                    defaultPermission: e
                  });
                }
              }}
              managePer={{
                permission: folderDetail.permission,
                onGetCollaboratorList: () => getCollaboratorList(folderDetail._id),
                permissionList: DatasetPermissionList,
                onUpdateCollaborators: ({
                  tmbIds,
                  permission
                }: {
                  tmbIds: string[];
                  permission: number;
                }) => {
                  return postUpdateDatasetCollaborators({
                    tmbIds,
                    permission,
                    datasetId: folderDetail._id
                  });
                },
                onDelOneCollaborator: (tmbId: string) =>
                  deleteDatasetCollaborators({
                    datasetId: folderDetail._id,
                    tmbId
                  })
              }}
            />
          </Box>
        )}
      </Flex>

      <ConfirmModal />
      {/* Edit name/intro/avatar of a dataset or folder. */}
      {editedDataset && (
        <EditResourceModal
          {...editedDataset}
          title={''}
          onClose={() => setEditedDataset(undefined)}
          onEdit={async (data) => {
            await putDatasetById({
              id: editedDataset.id,
              name: data.name,
              intro: data.intro,
              avatar: data.avatar
            });
            loadMyDatasets(parentId);
            refetchFolderDetail();
            setEditedDataset(undefined);
          }}
        />
      )}
      {!!moveDataId && (
        <MoveModal
          moveDataId={moveDataId}
          onClose={() => setMoveDataId('')}
          onSuccess={() => {
            refetch();
            refetchFolderDetail();
            setMoveDataId('');
          }}
        />
      )}
      {/* Permission configuration for the dataset selected via the card menu. */}
      {!!editPerDataset && (
        <ConfigPerModal
          avatar={editPerDataset.avatar}
          name={editPerDataset.name}
          defaultPer={{
            value: editPerDataset.defaultPermission,
            defaultValue: DatasetDefaultPermission,
            onChange: async (e) => {
              await putDatasetById({
                id: editPerDataset._id,
                defaultPermission: e
              });
              refetch();
            }
          }}
          managePer={{
            permission: editPerDataset.permission,
            onGetCollaboratorList: () => getCollaboratorList(editPerDataset._id),
            permissionList: DatasetPermissionList,
            onUpdateCollaborators: ({
              tmbIds,
              permission
            }: {
              tmbIds: string[];
              permission: number;
            }) => {
              return postUpdateDatasetCollaborators({
                tmbIds,
                permission,
                datasetId: editPerDataset._id
              });
            },
            onDelOneCollaborator: (tmbId: string) =>
              deleteDatasetCollaborators({
                datasetId: editPerDataset._id,
                tmbId
              })
          }}
          onClose={() => setEditPerDatasetIndex(undefined)}
        />
      )}
    </>
  );
}
export default List;

View File

@@ -0,0 +1,58 @@
import { getDatasetPaths } from '@/web/core/dataset/api';
import { useDatasetStore } from '@/web/core/dataset/store/dataset';
import { getErrText } from '@fastgpt/global/common/error/utils';
import { ParentTreePathItemType } from '@fastgpt/global/common/parentFolder/type';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { useQuery } from '@tanstack/react-query';
import { useTranslation } from 'next-i18next';
import { useRouter } from 'next/router';
import React from 'react';
import { createContext } from 'use-context-selector';
/** Shape of the data the dataset list page shares via context. */
export type DatasetContextType = {
  refetch: () => void;
  isFetching: boolean;
  paths: ParentTreePathItemType[];
};

// Default value used before the provider mounts: no-op refetch, empty paths.
export const DatasetContext = createContext<DatasetContextType>({
  refetch: () => {},
  isFetching: false,
  paths: []
});
function DatasetContextProvider({ children }: { children: React.ReactNode }) {
const router = useRouter();
const { toast } = useToast();
const { t } = useTranslation();
const { parentId } = router.query as { parentId: string };
const { loadMyDatasets } = useDatasetStore();
const { data, refetch, isFetching } = useQuery(
['loadDataset', parentId],
() => {
return Promise.all([loadMyDatasets(parentId), getDatasetPaths(parentId)]);
},
{
onError(err) {
toast({
status: 'error',
title: t(getErrText(err))
});
}
}
);
const paths = data?.[1] || [];
const contextValue = {
refetch,
isFetching,
paths
};
return <DatasetContext.Provider value={contextValue}>{children}</DatasetContext.Provider>;
}
export default DatasetContextProvider;

View File

@@ -1,144 +1,42 @@
import React, { useMemo, useRef, useState } from 'react';
import { Box, Flex, Grid, useDisclosure, Image, Button } from '@chakra-ui/react';
import React from 'react';
import { Box, Flex, useDisclosure, Image, Button } from '@chakra-ui/react';
import { useRouter } from 'next/router';
import PageContainer from '@/components/PageContainer';
import { useConfirm } from '@fastgpt/web/hooks/useConfirm';
import { AddIcon } from '@chakra-ui/icons';
import { useQuery } from '@tanstack/react-query';
import {
delDatasetById,
getDatasetPaths,
putDatasetById,
postCreateDataset
} from '@/web/core/dataset/api';
import { checkTeamExportDatasetLimit } from '@/web/support/user/team/api';
import { postCreateDataset } from '@/web/core/dataset/api';
import { useTranslation } from 'next-i18next';
import Avatar from '@/components/Avatar';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { serviceSideProps } from '@/web/common/utils/i18n';
import dynamic from 'next/dynamic';
import { DatasetTypeEnum, DatasetTypeMap } from '@fastgpt/global/core/dataset/constants';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { FolderImgUrl, FolderIcon } from '@fastgpt/global/common/file/image/constants';
import MyMenu from '@fastgpt/web/components/common/MyMenu';
import { useRequest } from '@fastgpt/web/hooks/useRequest';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { useEditTitle } from '@/web/common/hooks/useEditTitle';
import EditFolderModal, { useEditFolder } from '../component/EditFolderModal';
import { useDrag } from '@/web/common/hooks/useDrag';
import { useUserStore } from '@/web/support/user/useUserStore';
import PermissionIconText from '@/components/support/permission/IconText';
import { PermissionTypeEnum } from '@fastgpt/global/support/permission/constant';
import { DatasetItemType } from '@fastgpt/global/core/dataset/type';
import ParentPaths from '@/components/common/ParentPaths';
import DatasetTypeTag from '@/components/core/dataset/DatasetTypeTag';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { getErrText } from '@fastgpt/global/common/error/utils';
import { useDatasetStore } from '@/web/core/dataset/store/dataset';
import { downloadFetch } from '@/web/common/system/utils';
import EmptyTip from '@fastgpt/web/components/common/EmptyTip';
import List from './component/List';
import { DatasetContext } from './context';
import DatasetContextProvider from './context';
import { useContextSelector } from 'use-context-selector';
const CreateModal = dynamic(() => import('./component/CreateModal'), { ssr: false });
const MoveModal = dynamic(() => import('./component/MoveModal'), { ssr: false });
const Dataset = () => {
const { t } = useTranslation();
const { toast } = useToast();
const router = useRouter();
const { parentId } = router.query as { parentId: string };
const { setLoading } = useSystemStore();
const { userInfo } = useUserStore();
const { myDatasets, loadMyDatasets, setMyDatasets } = useDatasetStore();
const DeleteTipsMap = useRef({
[DatasetTypeEnum.folder]: t('dataset.deleteFolderTips'),
[DatasetTypeEnum.dataset]: t('core.dataset.Delete Confirm'),
[DatasetTypeEnum.websiteDataset]: t('core.dataset.Delete Confirm'),
[DatasetTypeEnum.externalFile]: t('core.dataset.Delete Confirm')
});
const { openConfirm, ConfirmModal } = useConfirm({
type: 'delete'
});
const { onOpenModal: onOpenTitleModal, EditModal: EditTitleModal } = useEditTitle({
title: t('Rename')
});
const { moveDataId, setMoveDataId, dragStartId, setDragStartId, dragTargetId, setDragTargetId } =
useDrag();
const { myDatasets } = useDatasetStore();
const { parentId } = router.query as { parentId: string };
const {
isOpen: isOpenCreateModal,
onOpen: onOpenCreateModal,
onClose: onCloseCreateModal
} = useDisclosure();
const { editFolderData, setEditFolderData } = useEditFolder();
/* 点击删除 */
const { mutate: onclickDelDataset } = useRequest({
mutationFn: async (id: string) => {
setLoading(true);
await delDatasetById(id);
return id;
},
onSuccess(id: string) {
setMyDatasets(myDatasets.filter((item) => item._id !== id));
},
onSettled() {
setLoading(false);
},
successToast: t('common.Delete Success'),
errorToast: t('dataset.Delete Dataset Error')
});
// check export limit
const { mutate: exportDataset } = useRequest({
mutationFn: async (dataset: DatasetItemType) => {
setLoading(true);
await checkTeamExportDatasetLimit(dataset._id);
await downloadFetch({
url: `/api/core/dataset/exportAll?datasetId=${dataset._id}`,
filename: `${dataset.name}.csv`
});
},
onSuccess() {
toast({
status: 'success',
title: t('core.dataset.Start export')
});
},
onSettled() {
setLoading(false);
},
errorToast: t('dataset.Export Dataset Limit Error')
});
const { data, refetch, isFetching } = useQuery(
['loadDataset', parentId],
() => {
return Promise.all([loadMyDatasets(parentId), getDatasetPaths(parentId)]);
},
{
onError(err) {
toast({
status: 'error',
title: t(getErrText(err))
});
}
}
);
const paths = data?.[1] || [];
const formatDatasets = useMemo(
() =>
myDatasets.map((item) => {
return {
...item,
label: DatasetTypeMap[item.type]?.label,
icon: DatasetTypeMap[item.type]?.icon
};
}),
[myDatasets]
);
const { paths, refetch, isFetching } = useContextSelector(DatasetContext, (v) => v);
return (
<PageContainer
@@ -148,7 +46,7 @@ const Dataset = () => {
<Flex pt={[4, '30px']} alignItems={'center'} justifyContent={'space-between'}>
{/* url path */}
<ParentPaths
paths={paths.map((path, i) => ({
paths={paths.map((path) => ({
parentId: path.parentId,
parentName: path.parentName
}))}
@@ -208,254 +106,7 @@ const Dataset = () => {
/>
)}
</Flex>
<Grid
py={5}
gridTemplateColumns={['1fr', 'repeat(2,1fr)', 'repeat(3,1fr)', 'repeat(4,1fr)']}
gridGap={5}
userSelect={'none'}
>
{formatDatasets.map((dataset) => (
<Box
display={'flex'}
flexDirection={'column'}
key={dataset._id}
py={3}
px={5}
cursor={'pointer'}
borderWidth={1.5}
borderColor={dragTargetId === dataset._id ? 'primary.600' : 'borderColor.low'}
bg={'white'}
borderRadius={'md'}
minH={'130px'}
position={'relative'}
data-drag-id={dataset.type === DatasetTypeEnum.folder ? dataset._id : undefined}
draggable
onDragStart={(e) => {
setDragStartId(dataset._id);
}}
onDragOver={(e) => {
e.preventDefault();
const targetId = e.currentTarget.getAttribute('data-drag-id');
if (!targetId) return;
DatasetTypeEnum.folder && setDragTargetId(targetId);
}}
onDragLeave={(e) => {
e.preventDefault();
setDragTargetId(undefined);
}}
onDrop={async (e) => {
e.preventDefault();
if (!dragTargetId || !dragStartId || dragTargetId === dragStartId) return;
// update parentId
try {
await putDatasetById({
id: dragStartId,
parentId: dragTargetId
});
refetch();
} catch (error) {}
setDragTargetId(undefined);
}}
_hover={{
borderColor: 'primary.300',
boxShadow: '1.5',
'& .delete': {
display: 'block'
}
}}
onClick={() => {
if (dataset.type === DatasetTypeEnum.folder) {
router.push({
pathname: '/dataset/list',
query: {
parentId: dataset._id
}
});
} else {
router.push({
pathname: '/dataset/detail',
query: {
datasetId: dataset._id
}
});
}
}}
>
{userInfo?.team?.permission.hasWritePer && dataset.isOwner && (
<Box
position={'absolute'}
top={3}
right={3}
borderRadius={'md'}
_hover={{
color: 'primary.500',
'& .icon': {
bg: 'myGray.100'
}
}}
onClick={(e) => {
e.stopPropagation();
}}
>
<MyMenu
Button={
<Box w={'22px'} h={'22px'}>
<MyIcon
className="icon"
name={'more'}
h={'16px'}
w={'16px'}
px={1}
py={1}
borderRadius={'md'}
cursor={'pointer'}
/>
</Box>
}
menuList={[
{
children: [
{
label: (
<Flex alignItems={'center'}>
<MyIcon name={'edit'} w={'14px'} mr={2} />
{t('Rename')}
</Flex>
),
onClick: () =>
onOpenTitleModal({
defaultVal: dataset.name,
onSuccess: (val) => {
if (val === dataset.name || !val) return;
putDatasetById({
id: dataset._id,
name: val
});
}
})
},
{
label: (
<Flex alignItems={'center'}>
<MyIcon name={'common/file/move'} w={'14px'} mr={2} />
{t('Move')}
</Flex>
),
onClick: () => setMoveDataId(dataset._id)
},
{
label: (
<Flex alignItems={'center'}>
<MyIcon name={'export'} w={'14px'} mr={2} />
{t('Export')}
</Flex>
),
onClick: () => {
exportDataset(dataset);
}
},
...(dataset.permission === PermissionTypeEnum.private
? [
{
label: (
<Flex alignItems={'center'}>
<MyIcon
name={'support/permission/publicLight'}
w={'14px'}
mr={2}
/>
{t('permission.Set Public')}
</Flex>
),
onClick: () => {
putDatasetById({
id: dataset._id,
permission: PermissionTypeEnum.public
});
}
}
]
: [
{
label: (
<Flex alignItems={'center'}>
<MyIcon
name={'support/permission/privateLight'}
w={'14px'}
mr={2}
/>
{t('permission.Set Private')}
</Flex>
),
onClick: () => {
putDatasetById({
id: dataset._id,
permission: PermissionTypeEnum.private
});
}
}
])
]
},
{
children: [
{
label: (
<Flex alignItems={'center'}>
<MyIcon name={'delete'} w={'14px'} mr={2} />
{t('common.Delete')}
</Flex>
),
type: 'danger',
onClick: () => {
openConfirm(
() => onclickDelDataset(dataset._id),
undefined,
DeleteTipsMap.current[dataset.type]
)();
}
}
]
}
]}
/>
</Box>
)}
<Flex alignItems={'center'} h={'38px'}>
<Avatar src={dataset.avatar} borderRadius={'md'} w={'28px'} />
<Box mx={3} className="textEllipsis3">
{dataset.name}
</Box>
</Flex>
<Box
flex={1}
className={'textEllipsis3'}
py={1}
wordBreak={'break-all'}
fontSize={'xs'}
color={'myGray.500'}
>
{dataset.intro ||
(dataset.type === DatasetTypeEnum.folder
? t('core.dataset.Folder placeholder')
: t('core.dataset.Intro Placeholder'))}
</Box>
<Flex alignItems={'center'} fontSize={'sm'}>
<Box flex={1}>
<PermissionIconText permission={dataset.permission} color={'myGray.600'} />
</Box>
{dataset.type !== DatasetTypeEnum.folder && (
<DatasetTypeTag type={dataset.type} py={1} px={2} />
)}
</Flex>
</Box>
))}
</Grid>
{myDatasets.length === 0 && (
<EmptyTip pt={'35vh'} text={t('core.dataset.Empty Dataset Tips')}></EmptyTip>
)}
<ConfirmModal />
<EditTitleModal />
<List />
{isOpenCreateModal && <CreateModal onClose={onCloseCreateModal} parentId={parentId} />}
{!!editFolderData && (
<EditFolderModal
@@ -477,16 +128,6 @@ const Dataset = () => {
isEdit={false}
/>
)}
{!!moveDataId && (
<MoveModal
moveDataId={moveDataId}
onClose={() => setMoveDataId('')}
onSuccess={() => {
refetch();
setMoveDataId('');
}}
/>
)}
</PageContainer>
);
};
@@ -499,4 +140,12 @@ export async function getServerSideProps(content: any) {
};
}
export default Dataset;
function DatasetContextWrapper() {
return (
<DatasetContextProvider>
<Dataset />
</DatasetContextProvider>
);
}
export default DatasetContextWrapper;

View File

@@ -1,44 +0,0 @@
import { DatasetDataItemType } from '@fastgpt/global/core/dataset/type';
import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
import { AuthModeType } from '@fastgpt/service/support/permission/type';
/* data permission same of collection */
/**
 * Authorize access to a single dataset data item.
 *
 * A data item carries no permission of its own — it inherits from its parent
 * collection — so this resolves the Mongo record first and then delegates the
 * actual auth check to authDatasetCollection.
 *
 * Rejects with 'core.dataset.error.Data not found' when the id does not exist.
 * Returns the collection auth result plus a normalized `datasetData` item.
 */
export async function authDatasetData({
  dataId,
  ...props
}: AuthModeType & {
  dataId: string;
}) {
  // Resolve the record before any permission work; auth is pointless without it.
  const record = await MongoDatasetData.findById(dataId);
  if (!record) {
    return Promise.reject('core.dataset.error.Data not found');
  }

  // Permission is checked against the parent collection.
  const authResult = await authDatasetCollection({
    ...props,
    collectionId: record.collectionId
  });
  const { collection } = authResult;

  const datasetData: DatasetDataItemType = {
    id: String(record._id),
    teamId: record.teamId,
    q: record.q,
    a: record.a,
    chunkIndex: record.chunkIndex,
    indexes: record.indexes,
    datasetId: String(record.datasetId),
    collectionId: String(record.collectionId),
    sourceName: collection.name || '',
    // File-based collections expose fileId; link-based ones expose rawLink.
    sourceId: collection?.fileId || collection?.rawLink,
    isOwner: String(record.tmbId) === authResult.tmbId,
    canWrite: authResult.canWrite
  };

  return {
    ...authResult,
    datasetData
  };
}

View File

@@ -11,5 +11,5 @@ export const getCollaboratorList = (appId: string) =>
export const postUpdateAppCollaborators = (body: UpdateAppCollaboratorBody) =>
POST('/proApi/core/app/collaborator/update', body);
export const deleteAppCollaborators = ({ ...params }: AppCollaboratorDeleteParams) =>
DELETE('/proApi/core/app/collaborator/delete', { ...params });
export const deleteAppCollaborators = (params: AppCollaboratorDeleteParams) =>
DELETE('/proApi/core/app/collaborator/delete', params);

View File

@@ -0,0 +1,15 @@
import {
UpdateDatasetCollaboratorBody,
DatasetCollaboratorDeleteParams
} from '@fastgpt/global/core/dataset/collaborator';
import { DELETE, GET, POST } from '@/web/common/api/request';
import { CollaboratorItemType } from '@fastgpt/global/support/permission/collaborator';
/** List collaborators of a dataset. */
export const getCollaboratorList = (datasetId: string) =>
  GET<CollaboratorItemType[]>('/proApi/core/dataset/collaborator/list', { datasetId });

/** Create or update collaborator permissions for a dataset. */
export const postUpdateDatasetCollaborators = (body: UpdateDatasetCollaboratorBody) =>
  POST('/proApi/core/dataset/collaborator/update', body);

/**
 * Remove a single collaborator from a dataset.
 * Params are passed straight through — the previous `({ ...params }) =>
 * DELETE(url, { ...params })` rest/spread round-trip added nothing; this
 * matches the equivalent app collaborator API.
 */
export const deleteDatasetCollaborators = (params: DatasetCollaboratorDeleteParams) =>
  DELETE('/proApi/core/dataset/collaborator/delete', params);

View File

@@ -8,6 +8,8 @@ import type {
DatasetCollectionItemType,
DatasetItemType
} from '@fastgpt/global/core/dataset/type.d';
import { DatasetDefaultPermission } from '@fastgpt/global/support/permission/dataset/constant';
import { DatasetPermission } from '@fastgpt/global/support/permission/dataset/controller';
export const defaultDatasetDetail: DatasetItemType = {
_id: '',
@@ -21,11 +23,10 @@ export const defaultDatasetDetail: DatasetItemType = {
name: '',
intro: '',
status: 'active',
permission: 'private',
isOwner: false,
canWrite: false,
permission: new DatasetPermission(),
vectorModel: defaultVectorModels[0],
agentModel: defaultQAModels[0]
agentModel: defaultQAModels[0],
defaultPermission: DatasetDefaultPermission
};
export const defaultCollectionDetail: DatasetCollectionItemType = {
@@ -44,20 +45,21 @@ export const defaultCollectionDetail: DatasetCollectionItemType = {
name: '',
intro: '',
status: 'active',
permission: 'private',
permission: new DatasetPermission(),
vectorModel: defaultVectorModels[0].model,
agentModel: defaultQAModels[0].model
agentModel: defaultQAModels[0].model,
defaultPermission: DatasetDefaultPermission
},
parentId: '',
name: '',
type: DatasetCollectionTypeEnum.file,
updateTime: new Date(),
canWrite: false,
sourceName: '',
sourceId: '',
createTime: new Date(),
trainingType: TrainingModeEnum.chunk,
chunkSize: 0
chunkSize: 0,
permission: new DatasetPermission()
};
export enum ImportProcessWayEnum {