Dataset Permission (#1786)

* feat: dataset controllers

feat: dataset schema

fix: add missing type to dataset schema
Signed-off-by: FinleyGe <m13203533462@163.com>

* feat: dataset list api

Signed-off-by: FinleyGe <m13203533462@163.com>

* chore: all dataset api

Signed-off-by: FinleyGe <m13203533462@163.com>

* feat: new auth dataset method

Signed-off-by: FinleyGe <m13203533462@163.com>

* chore: use new auth method in detail, paths.
feat: add new param defaultPermission to create api

Signed-off-by: FinleyGe <m13203533462@163.com>

* chore: app auth params

Signed-off-by: FinleyGe <m13203533462@163.com>

* chore: use new auth method

Signed-off-by: FinleyGe <m13203533462@163.com>

* feat: new auth collection and file method

Signed-off-by: FinleyGe <m13203533462@163.com>

* chore: dataset collection api new auth

Signed-off-by: FinleyGe <m13203533462@163.com>

* chore: create/*.ts auth

Signed-off-by: FinleyGe <m13203533462@163.com>

* chore: dataset auth

Signed-off-by: FinleyGe <m13203533462@163.com>

* fix: import paths

Signed-off-by: FinleyGe <m13203533462@163.com>

* feat: dataset collaborator

Signed-off-by: FinleyGe <m13203533462@163.com>

* chore: dataset frontend

feat: dataset list frontend

feat: dataset detail
Signed-off-by: FinleyGe <m13203533462@163.com>

* feat: finish the dataset permission

fix: ts errors
Signed-off-by: FinleyGe <m13203533462@163.com>

* fix: empty response of collection api

Signed-off-by: FinleyGe <m13203533462@163.com>

* chore: adjust the code

* chore: adjust the code

* chore: i18n

* fix: ts error

* fix: fe CollectionCard permission

---------

Signed-off-by: FinleyGe <m13203533462@163.com>
Author: Finley Ge
Date: 2024-06-20 20:52:03 +08:00
Committed by: GitHub
parent 2b25e3cc2d
commit 980b4d3db5
71 changed files with 12411 additions and 9993 deletions

View File

@@ -11,6 +11,7 @@ import {
   SearchDataResponseItemType
 } from '@fastgpt/global/core/dataset/type';
 import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
+import { PermissionValueType } from '@fastgpt/global/support/permission/type';

 /* ================= dataset ===================== */
 export type CreateDatasetParams = {
@@ -21,6 +22,7 @@ export type CreateDatasetParams = {
   avatar: string;
   vectorModel?: string;
   agentModel?: string;
+  defaultPermission?: PermissionValueType;
 };

 export type RebuildEmbeddingProps = {

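For illustration, a client payload using the new field might look like the sketch below; every concrete value here is a placeholder rather than something this commit prescribes:

import { NullPermission } from '@fastgpt/global/support/permission/constant';
import type { CreateDatasetParams } from '@fastgpt/global/core/dataset/api';

// Placeholder values for illustration; defaultPermission may be omitted,
// in which case the create handler (later in this diff) falls back to
// NullPermission.
const params: CreateDatasetParams = {
  name: 'my-dataset',
  avatar: '/imgs/avatar/dataset.svg',
  vectorModel: 'text-embedding-ada-002',
  agentModel: 'gpt-3.5-turbo',
  defaultPermission: NullPermission
};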
View File

@@ -3,6 +3,7 @@ import {
   DatasetCollectionSchemaType,
   DatasetDataSchemaType
 } from '@fastgpt/global/core/dataset/type.d';
+import { DatasetPermission } from '@fastgpt/global/support/permission/dataset/controller';

 /* ================= dataset ===================== */
@@ -18,7 +19,7 @@ export type DatasetCollectionsListItemType = {
   trainingAmount: number;
   fileId?: string;
   rawLink?: string;
-  canWrite: boolean;
+  permission: DatasetPermission;
 };

 /* ================= data ===================== */

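Frontend code that previously branched on canWrite now receives the full permission object. A hedged sketch of the corresponding guard follows; hasReadPer appears later in this diff, while hasWritePer is assumed to be its write-side counterpart:

import type { DatasetCollectionsListItemType } from '@/global/core/dataset/type';

// hasWritePer is an assumed checker, mirroring the hasReadPer used by the
// dataset list API in this commit.
function canEditCollection(item: DatasetCollectionsListItemType) {
  return item.permission.hasWritePer;
}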
View File

@@ -1,31 +1,66 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
+import type { NextApiRequest } from 'next';
 import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
 import { getVectorModel } from '@fastgpt/service/core/ai/model';
 import type { DatasetSimpleItemType } from '@fastgpt/global/core/dataset/type.d';
-import { mongoRPermission } from '@fastgpt/global/support/permission/utils';
-import { authUserPer } from '@fastgpt/service/support/permission/user/auth';
 import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
 import { NextAPI } from '@/service/middleware/entry';
-import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
+import {
+  PerResourceTypeEnum,
+  ReadPermissionVal
+} from '@fastgpt/global/support/permission/constant';
+import { MongoResourcePermission } from '@fastgpt/service/support/permission/schema';
+import { DatasetPermission } from '@fastgpt/global/support/permission/dataset/controller';
+import { authUserPer } from '@fastgpt/service/support/permission/user/auth';

 /* get all dataset by teamId or tmbId */
-async function handler(
-  req: NextApiRequest,
-  res: NextApiResponse<any>
-): Promise<DatasetSimpleItemType[]> {
-  // 凭证校验
-  const { teamId, tmbId, permission } = await authUserPer({
+async function handler(req: NextApiRequest): Promise<DatasetSimpleItemType[]> {
+  const {
+    teamId,
+    tmbId,
+    permission: tmbPer
+  } = await authUserPer({
     req,
     authToken: true,
     authApiKey: true,
     per: ReadPermissionVal
   });
-  const datasets = await MongoDataset.find({
-    ...mongoRPermission({ teamId, tmbId, permission }),
-    type: { $ne: DatasetTypeEnum.folder }
-  }).lean();
+  const [myDatasets, rpList] = await Promise.all([
+    MongoDataset.find({
+      teamId,
+      type: {
+        $ne: DatasetTypeEnum.folder
+      }
+    })
+      .sort({
+        updateTime: -1
+      })
+      .lean(),
+    MongoResourcePermission.find({
+      resourceType: PerResourceTypeEnum.dataset,
+      teamId,
+      tmbId
+    }).lean()
+  ]);

-  return datasets.map((item) => ({
+  const filterDatasets = myDatasets
+    .map((dataset) => {
+      const perVal = rpList.find(
+        (item) => String(item.resourceId) === String(dataset._id)
+      )?.permission;
+      const Per = new DatasetPermission({
+        per: perVal ?? dataset.defaultPermission,
+        isOwner: String(dataset.tmbId) === tmbId || tmbPer.isOwner
+      });
+      return {
+        ...dataset,
+        permission: Per
+      };
+    })
+    .filter((app) => app.permission.hasReadPer);
+
+  return filterDatasets.map((item) => ({
     _id: item._id,
     avatar: item.avatar,
     name: item.name,

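The handler above resolves effective access from three sources: a member-specific resource permission when one exists, otherwise the dataset's defaultPermission, with ownership overriding both. The same rule as a standalone sketch (parameter names are illustrative, not from the codebase):

import { DatasetPermission } from '@fastgpt/global/support/permission/dataset/controller';
import type { PermissionValueType } from '@fastgpt/global/support/permission/type';

// Illustrative helper restating the merge rule used in the handler above.
function resolveDatasetPermission({
  memberPer,
  defaultPermission,
  isOwner
}: {
  memberPer?: PermissionValueType;
  defaultPermission: PermissionValueType;
  isOwner: boolean;
}) {
  return new DatasetPermission({
    per: memberPer ?? defaultPermission,
    isOwner
  });
}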
View File

@@ -1,39 +1,27 @@
 /*
   Create one dataset collection
 */
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import type { CreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api.d';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
 import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
+import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
-    const body = req.body as CreateDatasetCollectionParams;
-    const { teamId, tmbId } = await authDataset({
-      req,
-      authToken: true,
-      authApiKey: true,
-      datasetId: body.datasetId,
-      per: 'w'
-    });
-    const { _id } = await createOneCollection({
-      ...body,
-      teamId,
-      tmbId
-    });
-    jsonRes(res, {
-      data: _id
-    });
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
-}
+async function handler(req: NextApiRequest) {
+  const body = req.body as CreateDatasetCollectionParams;
+
+  const { teamId, tmbId } = await authDataset({
+    req,
+    authToken: true,
+    authApiKey: true,
+    datasetId: body.datasetId,
+    per: WritePermissionVal
+  });
+
+  const { _id } = await createOneCollection({
+    ...body,
+    teamId,
+    tmbId
+  });
+
+  return _id;
+}
+
+export default NextAPI(handler);

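This file shows the handler contract the whole commit migrates to: handlers return a value (or a rejected promise) and leave response handling to the middleware, replacing the per-route try/catch plus jsonRes boilerplate. A minimal sketch, assuming NextAPI serializes the return value as the response data and converts thrown errors into error responses:

import type { NextApiRequest } from 'next';
import { NextAPI } from '@/service/middleware/entry';

async function handler(req: NextApiRequest) {
  // The returned value is assumed to become the response data; throwing
  // (or returning a rejected promise) is assumed to be turned into an
  // error response by the NextAPI middleware.
  return 'ok';
}

export default NextAPI(handler);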
View File

@@ -1,8 +1,6 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import { readFileContentFromMongo } from '@fastgpt/service/common/file/gridfs/controller';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
 import { FileIdCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api';
 import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
 import {
@@ -18,97 +16,88 @@ import { createTrainingUsage } from '@fastgpt/service/support/wallet/usage/contr
 import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
 import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
 import { rawText2Chunks } from '@fastgpt/service/core/dataset/read';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
+import { NextAPI } from '@/service/middleware/entry';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
+async function handler(req: NextApiRequest) {
   const { datasetId, parentId, fileId } = req.body as FileIdCreateDatasetCollectionParams;
   const trainingType = TrainingModeEnum.chunk;

-  try {
-    await connectToDatabase();
-    const { teamId, tmbId, dataset } = await authDataset({
-      req,
-      authToken: true,
-      authApiKey: true,
-      per: 'w',
-      datasetId: datasetId
-    });
+  const { teamId, tmbId, dataset } = await authDataset({
+    req,
+    authToken: true,
+    authApiKey: true,
+    per: WritePermissionVal,
+    datasetId: datasetId
+  });

   // 1. read file
   const { rawText, filename } = await readFileContentFromMongo({
     teamId,
     bucketName: BucketNameEnum.dataset,
     fileId,
     isQAImport: true
   });
   console.log(rawText);
   // 2. split chunks
   const chunks = rawText2Chunks({
     rawText,
     isQAImport: true
   });
   // 3. auth limit
   await checkDatasetLimit({
     teamId,
     insertLen: predictDataLimitLength(trainingType, chunks)
   });

   await mongoSessionRun(async (session) => {
     // 4. create collection
     const { _id: collectionId } = await createOneCollection({
       teamId,
       tmbId,
       name: filename,
       parentId,
       datasetId,
       type: DatasetCollectionTypeEnum.file,
       fileId,

       // special metadata
       trainingType,
       chunkSize: 0,
       session
     });

     // 5. create training bill
     const { billId } = await createTrainingUsage({
       teamId,
       tmbId,
       appName: filename,
       billSource: UsageSourceEnum.training,
       vectorModel: getVectorModel(dataset.vectorModel)?.name,
       agentModel: getLLMModel(dataset.agentModel)?.name,
       session
     });

     // 6. insert to training queue
     await pushDataListToTrainingQueue({
       teamId,
       tmbId,
       datasetId: dataset._id,
       collectionId,
       agentModel: dataset.agentModel,
       vectorModel: dataset.vectorModel,
       trainingMode: trainingType,
       billId,
       data: chunks.map((chunk, index) => ({
         q: chunk.q,
         a: chunk.a,
         chunkIndex: index
       })),
       session
     });

     return collectionId;
   });
-
-    jsonRes(res);
-  } catch (error) {
-    jsonRes(res, {
-      code: 500,
-      error
-    });
-  }
 }
+
+export default NextAPI(handler);

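Steps 4 to 6 above run inside a single mongoSessionRun callback, so the collection, the usage record, and the training-queue entries share one session. A minimal sketch of the pattern, assuming the helper commits when the callback resolves and aborts when it throws:

import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';

async function createThingsAtomically() {
  return mongoSessionRun(async (session) => {
    // Each write inside the callback receives the same { session }, so
    // all of them commit or roll back together (assumed semantics).
    return 'created-id';
  });
}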
View File

@@ -1,8 +1,5 @@
-import type { NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
 import { readFileContentFromMongo } from '@fastgpt/service/common/file/gridfs/controller';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
 import { FileIdCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api';
 import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
 import {
@@ -23,11 +20,9 @@ import { MongoRawTextBuffer } from '@fastgpt/service/common/buffer/rawText/schem
 import { rawText2Chunks } from '@fastgpt/service/core/dataset/read';
 import { NextAPI } from '@/service/middleware/entry';
 import { ApiRequestProps } from '@fastgpt/service/type/next';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';

-async function handler(
-  req: ApiRequestProps<FileIdCreateDatasetCollectionParams>,
-  res: NextApiResponse<any>
-) {
+async function handler(req: ApiRequestProps<FileIdCreateDatasetCollectionParams>) {
   const {
     fileId,
     trainingType = TrainingModeEnum.chunk,
@@ -37,13 +32,11 @@ async function handler(
     ...body
   } = req.body;

-  await connectToDatabase();
-
   const { teamId, tmbId, dataset } = await authDataset({
     req,
     authToken: true,
     authApiKey: true,
-    per: 'w',
+    per: WritePermissionVal,
     datasetId: body.datasetId
   });
@@ -137,13 +130,10 @@ async function handler(
     }
   );

+    // remove buffer
+    await MongoRawTextBuffer.deleteOne({ sourceId: fileId });
+
     return collectionId;
   });

-  // remove buffer
-  await MongoRawTextBuffer.deleteOne({ sourceId: fileId });
-
-  jsonRes(res);
 }

 export default NextAPI(handler);

View File

@@ -1,11 +1,6 @@
 /*
   Create one dataset collection
 */
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import type { LinkCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api.d';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
 import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
 import {
   TrainingModeEnum,
@@ -18,83 +13,75 @@ import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants'
 import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
 import { reloadCollectionChunks } from '@fastgpt/service/core/dataset/collection/utils';
 import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
+import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
+async function handler(req: NextApiRequest) {
   const {
     link,
     trainingType = TrainingModeEnum.chunk,
     chunkSize = 512,
     chunkSplitter,
     qaPrompt,
     ...body
   } = req.body as LinkCreateDatasetCollectionParams;

   const { teamId, tmbId, dataset } = await authDataset({
     req,
     authToken: true,
     authApiKey: true,
     datasetId: body.datasetId,
-    per: 'w'
+    per: WritePermissionVal
   });

   // 1. check dataset limit
   await checkDatasetLimit({
     teamId,
     insertLen: predictDataLimitLength(trainingType, new Array(10))
   });

-  const { _id: collectionId } = await mongoSessionRun(async (session) => {
+  await mongoSessionRun(async (session) => {
     // 2. create collection
     const collection = await createOneCollection({
       ...body,
       name: link,
       teamId,
       tmbId,
       type: DatasetCollectionTypeEnum.link,

       trainingType,
       chunkSize,
       chunkSplitter,
       qaPrompt,

       rawLink: link,
       session
     });

     // 3. create bill and start sync
     const { billId } = await createTrainingUsage({
       teamId,
       tmbId,
       appName: 'core.dataset.collection.Sync Collection',
       billSource: UsageSourceEnum.training,
       vectorModel: getVectorModel(dataset.vectorModel).name,
       agentModel: getLLMModel(dataset.agentModel).name,
       session
     });

     // load
     await reloadCollectionChunks({
       collection: {
         ...collection.toObject(),
         datasetId: dataset
       },
       tmbId,
       billId,
       session
     });

     return collection;
   });
-
-  jsonRes(res, {
-    data: { collectionId }
-  });
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
 }
+
+export default NextAPI(handler);

View File

@@ -1,8 +1,7 @@
 import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
 import { uploadFile } from '@fastgpt/service/common/file/gridfs/controller';
 import { getUploadModel } from '@fastgpt/service/common/file/multer';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
 import { FileCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api';
 import { removeFilesByPaths } from '@fastgpt/service/common/file/utils';
 import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
@@ -23,6 +22,7 @@ import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
 import { MongoImage } from '@fastgpt/service/common/file/image/schema';
 import { readRawTextByLocalFile } from '@fastgpt/service/common/file/read/utils';
 import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';

 async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
   /**
@@ -49,7 +49,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
     req,
     authToken: true,
     authApiKey: true,
-    per: 'w',
+    per: WritePermissionVal,
     datasetId: data.datasetId
   });
@@ -168,9 +168,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
       };
     });

-    jsonRes(res, {
-      data: { collectionId, results: insertResults }
-    });
+    return { collectionId, results: insertResults };
   } catch (error) {
     removeFilesByPaths(filePaths);

View File

@@ -1,11 +1,6 @@
 /*
   Create one dataset collection
 */
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import type { TextCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api.d';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
 import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
 import {
   TrainingModeEnum,
@@ -20,102 +15,94 @@ import { createTrainingUsage } from '@fastgpt/service/support/wallet/usage/contr
 import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
 import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
 import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
+import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
+async function handler(req: NextApiRequest) {
   const {
     name,
     text,
     trainingType = TrainingModeEnum.chunk,
     chunkSize = 512,
     chunkSplitter,
     qaPrompt,
     ...body
   } = req.body as TextCreateDatasetCollectionParams;

   const { teamId, tmbId, dataset } = await authDataset({
     req,
     authToken: true,
     authApiKey: true,
     datasetId: body.datasetId,
-    per: 'w'
+    per: WritePermissionVal
   });

   // 1. split text to chunks
   const { chunks } = splitText2Chunks({
     text,
     chunkLen: chunkSize,
     overlapRatio: trainingType === TrainingModeEnum.chunk ? 0.2 : 0,
     customReg: chunkSplitter ? [chunkSplitter] : []
   });

   // 2. check dataset limit
   await checkDatasetLimit({
     teamId,
     insertLen: predictDataLimitLength(trainingType, chunks)
   });

   const createResult = await mongoSessionRun(async (session) => {
     // 3. create collection
     const { _id: collectionId } = await createOneCollection({
       ...body,
       teamId,
       tmbId,
       type: DatasetCollectionTypeEnum.virtual,

       name,
       trainingType,
       chunkSize,
       chunkSplitter,
       qaPrompt,

       hashRawText: hashStr(text),
       rawTextLength: text.length,
       session
     });

     // 4. create training bill
     const { billId } = await createTrainingUsage({
       teamId,
       tmbId,
       appName: name,
       billSource: UsageSourceEnum.training,
       vectorModel: getVectorModel(dataset.vectorModel)?.name,
       agentModel: getLLMModel(dataset.agentModel)?.name,
       session
     });

     // 5. push chunks to training queue
     const insertResults = await pushDataListToTrainingQueue({
       teamId,
       tmbId,
       datasetId: dataset._id,
       collectionId,
       agentModel: dataset.agentModel,
       vectorModel: dataset.vectorModel,
       trainingMode: trainingType,
       prompt: qaPrompt,
       billId,
       data: chunks.map((text, index) => ({
         q: text,
         chunkIndex: index
       })),
       session
     });

     return { collectionId, results: insertResults };
   });

-    jsonRes(res, {
-      data: createResult
-    });
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
+  return createResult;
 }

 export const config = {
@@ -125,3 +112,5 @@ export const config = {
     }
   }
 };
+
+export default NextAPI(handler);

View File

@@ -1,50 +1,42 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import { findCollectionAndChild } from '@fastgpt/service/core/dataset/collection/utils';
 import { delCollectionAndRelatedSources } from '@fastgpt/service/core/dataset/collection/controller';
-import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
 import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
+import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
+import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
+async function handler(req: NextApiRequest) {
   const { id: collectionId } = req.query as { id: string };

   if (!collectionId) {
-    throw new Error('CollectionIdId is required');
+    return Promise.reject(CommonErrEnum.missingParams);
   }

   const { teamId, collection } = await authDatasetCollection({
     req,
     authToken: true,
     authApiKey: true,
     collectionId,
-    per: 'w'
+    per: WritePermissionVal
   });

   // find all delete id
   const collections = await findCollectionAndChild({
     teamId,
     datasetId: collection.datasetId._id,
     collectionId,
     fields: '_id teamId datasetId fileId metadata'
   });

   // delete
   await mongoSessionRun((session) =>
     delCollectionAndRelatedSources({
       collections,
       session
     })
   );
-
-  jsonRes(res);
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
 }
+
+export default NextAPI(handler);

View File

@@ -1,50 +1,43 @@
 /*
   Get one dataset collection detail
 */
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
-import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
-import { DatasetCollectionItemType } from '@fastgpt/global/core/dataset/type';
+import type { NextApiRequest } from 'next';
+import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
 import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
 import { getFileById } from '@fastgpt/service/common/file/gridfs/controller';
 import { getCollectionSourceData } from '@fastgpt/global/core/dataset/collection/utils';
+import { NextAPI } from '@/service/middleware/entry';
+import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
+import { DatasetCollectionItemType } from '@fastgpt/global/core/dataset/type';
+import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
+async function handler(req: NextApiRequest): Promise<DatasetCollectionItemType> {
   const { id } = req.query as { id: string };

   if (!id) {
-    throw new Error('Id is required');
+    return Promise.reject(CommonErrEnum.missingParams);
   }

   // credential check
-  const { collection, canWrite } = await authDatasetCollection({
+  const { collection, permission } = await authDatasetCollection({
     req,
     authToken: true,
     authApiKey: true,
     collectionId: id,
-    per: 'r'
+    per: ReadPermissionVal
   });

   // get file
   const file = collection?.fileId
     ? await getFileById({ bucketName: BucketNameEnum.dataset, fileId: collection.fileId })
     : undefined;

-  jsonRes<DatasetCollectionItemType>(res, {
-    data: {
-      ...collection,
-      canWrite,
-      ...getCollectionSourceData(collection),
-      file
-    }
-  });
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
+  return {
+    ...collection,
+    ...getCollectionSourceData(collection),
+    permission,
+    file
+  };
 }

 export default NextAPI(handler);

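The detail route illustrates the new auth call shape used across these files: permission-value constants instead of the old 'r' / 'w' / 'owner' strings, and a resolved permission object returned alongside the resource. A minimal usage sketch:

import type { NextApiRequest } from 'next';
import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';

async function loadCollection(req: NextApiRequest, collectionId: string) {
  // per takes a permission-value constant (ReadPermissionVal,
  // WritePermissionVal, OwnerPermissionVal) rather than a string flag.
  const { collection, permission } = await authDatasetCollection({
    req,
    authToken: true,
    authApiKey: true,
    collectionId,
    per: ReadPermissionVal
  });
  return { collection, permission };
}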
View File

@@ -1,179 +1,167 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import { DatasetTrainingCollectionName } from '@fastgpt/service/core/dataset/training/schema';
 import { Types } from '@fastgpt/service/common/mongo';
 import type { DatasetCollectionsListItemType } from '@/global/core/dataset/type.d';
 import type { GetDatasetCollectionsProps } from '@/global/core/api/datasetReq';
-import { PagingData } from '@/types';
 import { MongoDatasetCollection } from '@fastgpt/service/core/dataset/collection/schema';
 import { DatasetCollectionTypeEnum } from '@fastgpt/global/core/dataset/constants';
-import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
 import { DatasetDataCollectionName } from '@fastgpt/service/core/dataset/data/schema';
 import { startTrainingQueue } from '@/service/core/dataset/training/utils';
+import { NextAPI } from '@/service/middleware/entry';
+import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
+async function handler(req: NextApiRequest) {
   let {
     pageNum = 1,
     pageSize = 10,
     datasetId,
     parentId = null,
     searchText = '',
     selectFolder = false,
     simple = false
   } = req.body as GetDatasetCollectionsProps;
   searchText = searchText?.replace(/'/g, '');
   pageSize = Math.min(pageSize, 30);

   // auth dataset and get my role
-  const { teamId, tmbId, canWrite } = await authDataset({
+  const { teamId, permission } = await authDataset({
     req,
     authToken: true,
     authApiKey: true,
     datasetId,
-    per: 'r'
+    per: ReadPermissionVal
   });

   const match = {
     teamId: new Types.ObjectId(teamId),
     datasetId: new Types.ObjectId(datasetId),
     parentId: parentId ? new Types.ObjectId(parentId) : null,
     ...(selectFolder ? { type: DatasetCollectionTypeEnum.folder } : {}),
     ...(searchText
       ? {
           name: new RegExp(searchText, 'i')
         }
       : {})
   };

   // not count data amount
   if (simple) {
     const collections = await MongoDatasetCollection.find(match, '_id parentId type name')
       .sort({
         updateTime: -1
       })
       .lean();
-    return jsonRes<PagingData<DatasetCollectionsListItemType>>(res, {
-      data: {
-        pageNum,
-        pageSize,
-        data: await Promise.all(
-          collections.map(async (item) => ({
-            ...item,
-            dataAmount: 0,
-            trainingAmount: 0,
-            canWrite // admin or team owner can write
-          }))
-        ),
-        total: await MongoDatasetCollection.countDocuments(match)
-      }
-    });
+    return {
+      pageNum,
+      pageSize,
+      data: await Promise.all(
+        collections.map(async (item) => ({
+          ...item,
+          dataAmount: 0,
+          trainingAmount: 0,
+          permission
+        }))
+      ),
+      total: await MongoDatasetCollection.countDocuments(match)
+    };
   }

   const [collections, total]: [DatasetCollectionsListItemType[], number] = await Promise.all([
     MongoDatasetCollection.aggregate([
       {
         $match: match
       },
       {
         $sort: { updateTime: -1 }
       },
       {
         $skip: (pageNum - 1) * pageSize
       },
       {
         $limit: pageSize
       },
       // count training data
       {
         $lookup: {
           from: DatasetTrainingCollectionName,
           let: { id: '$_id', team_id: match.teamId, dataset_id: match.datasetId },
           pipeline: [
             {
               $match: {
                 $expr: {
                   $and: [{ $eq: ['$teamId', '$$team_id'] }, { $eq: ['$collectionId', '$$id'] }]
                 }
               }
             },
             { $count: 'count' }
           ],
           as: 'trainingCount'
         }
       },
       // count collection total data
       {
         $lookup: {
           from: DatasetDataCollectionName,
           let: { id: '$_id', team_id: match.teamId, dataset_id: match.datasetId },
           pipeline: [
             {
               $match: {
                 $expr: {
                   $and: [
                     { $eq: ['$teamId', '$$team_id'] },
                     { $eq: ['$datasetId', '$$dataset_id'] },
                     { $eq: ['$collectionId', '$$id'] }
                   ]
                 }
               }
             },
             { $count: 'count' }
           ],
           as: 'dataCount'
         }
       },
       {
         $project: {
           _id: 1,
           parentId: 1,
           tmbId: 1,
           name: 1,
           type: 1,
           status: 1,
           updateTime: 1,
           fileId: 1,
           rawLink: 1,
           dataAmount: {
             $ifNull: [{ $arrayElemAt: ['$dataCount.count', 0] }, 0]
           },
           trainingAmount: {
             $ifNull: [{ $arrayElemAt: ['$trainingCount.count', 0] }, 0]
           }
         }
       }
     ]),
     MongoDatasetCollection.countDocuments(match)
   ]);

   const data = await Promise.all(
-    collections.map(async (item, i) => ({
+    collections.map(async (item) => ({
       ...item,
-      canWrite: String(item.tmbId) === tmbId || canWrite
+      permission
     }))
   );

   if (data.find((item) => item.trainingAmount > 0)) {
     startTrainingQueue();
   }

   // count collections
-  jsonRes<PagingData<DatasetCollectionsListItemType>>(res, {
-    data: {
-      pageNum,
-      pageSize,
-      data,
-      total
-    }
-  });
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
+  return {
+    pageNum,
+    pageSize,
+    data,
+    total
+  };
 }
+
+export default NextAPI(handler);

View File

@@ -1,34 +1,24 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
-import type { ParentTreePathItemType } from '@fastgpt/global/common/parentFolder/type.d';
+import type { NextApiRequest } from 'next';
 import { getDatasetCollectionPaths } from '@fastgpt/service/core/dataset/collection/utils';
-import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
+import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
+export default async function handler(req: NextApiRequest) {
   const { parentId } = req.query as { parentId: string };

   if (!parentId) {
-    return jsonRes(res, {
-      data: []
-    });
+    return [];
   }

-  await authDatasetCollection({ req, authToken: true, collectionId: parentId, per: 'r' });
+  await authDatasetCollection({
+    req,
+    authToken: true,
+    collectionId: parentId,
+    per: ReadPermissionVal
+  });

   const paths = await getDatasetCollectionPaths({
     parentId
   });

-  jsonRes<ParentTreePathItemType[]>(res, {
-    data: paths
-  });
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
+  return paths;
 }

View File

@@ -1,9 +1,10 @@
-import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
+import type { ApiRequestProps } from '@fastgpt/service/type/next';
 import { NextAPI } from '@/service/middleware/entry';
-import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
 import { DatasetCollectionTypeEnum } from '@fastgpt/global/core/dataset/constants';
 import { createFileToken } from '@fastgpt/service/support/permission/controller';
 import { BucketNameEnum, ReadFileBaseUrl } from '@fastgpt/global/common/file/constants';
+import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';

 export type readCollectionSourceQuery = {
   collectionId: string;
@@ -17,15 +18,14 @@ export type readCollectionSourceResponse = {
 };

 async function handler(
-  req: ApiRequestProps<readCollectionSourceBody, readCollectionSourceQuery>,
-  res: ApiResponseType<any>
+  req: ApiRequestProps<readCollectionSourceBody, readCollectionSourceQuery>
 ): Promise<readCollectionSourceResponse> {
   const { collection, teamId, tmbId } = await authDatasetCollection({
     req,
     authToken: true,
     authApiKey: true,
     collectionId: req.query.collectionId,
-    per: 'r'
+    per: ReadPermissionVal
   });

   const sourceUrl = await (async () => {

View File

@@ -1,7 +1,5 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
-import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
+import type { NextApiRequest } from 'next';
+import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
 import {
   getCollectionAndRawText,
   reloadCollectionChunks
@@ -17,98 +15,90 @@ import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants'
 import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
 import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
 import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
+import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
+import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
+async function handler(req: NextApiRequest) {
   const { collectionId } = req.body as { collectionId: string };

   if (!collectionId) {
-    throw new Error('CollectionIdId is required');
+    return Promise.reject(CommonErrEnum.missingParams);
   }

   const { collection, tmbId } = await authDatasetCollection({
     req,
     authToken: true,
     collectionId,
-    per: 'w'
+    per: WritePermissionVal
   });

   if (collection.type !== DatasetCollectionTypeEnum.link || !collection.rawLink) {
     return Promise.reject(DatasetErrEnum.unLinkCollection);
   }

   const { title, rawText, isSameRawText } = await getCollectionAndRawText({
     collection
   });

   if (isSameRawText) {
-    return jsonRes(res, {
-      data: DatasetCollectionSyncResultEnum.sameRaw
-    });
+    return DatasetCollectionSyncResultEnum.sameRaw;
   }

   /* Not the same original text, create and reload */
   const vectorModelData = getVectorModel(collection.datasetId.vectorModel);
   const agentModelData = getLLMModel(collection.datasetId.agentModel);

   await mongoSessionRun(async (session) => {
     // create training bill
     const { billId } = await createTrainingUsage({
       teamId: collection.teamId,
       tmbId,
       appName: 'core.dataset.collection.Sync Collection',
       billSource: UsageSourceEnum.training,
       vectorModel: vectorModelData.name,
       agentModel: agentModelData.name,
       session
     });

     // create a collection and delete old
     const newCol = await createOneCollection({
       teamId: collection.teamId,
       tmbId: collection.tmbId,
       parentId: collection.parentId,
       datasetId: collection.datasetId._id,
       name: title || collection.name,
       type: collection.type,
       trainingType: collection.trainingType,
       chunkSize: collection.chunkSize,
       fileId: collection.fileId,
       rawLink: collection.rawLink,
       metadata: collection.metadata,
       createTime: collection.createTime,
       session
     });

     // start load
     await reloadCollectionChunks({
       collection: {
         ...newCol.toObject(),
         datasetId: collection.datasetId
       },
       tmbId,
       billId,
       rawText,
       session
     });

     // delete old collection
     await delCollectionAndRelatedSources({
       collections: [collection],
       session
     });
   });

-  jsonRes(res, {
-    data: DatasetCollectionSyncResultEnum.success
-  });
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
+  return DatasetCollectionSyncResultEnum.success;
 }

 export default NextAPI(handler);

View File

@@ -1,43 +1,36 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import type { UpdateDatasetCollectionParams } from '@/global/core/api/datasetReq.d';
 import { MongoDatasetCollection } from '@fastgpt/service/core/dataset/collection/schema';
 import { getCollectionUpdateTime } from '@fastgpt/service/core/dataset/collection/utils';
-import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
 import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
+import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
+async function handler(req: NextApiRequest) {
   const { id, parentId, name } = req.body as UpdateDatasetCollectionParams;

   if (!id) {
-    throw new Error('缺少参数');
+    return Promise.reject(CommonErrEnum.missingParams);
   }

   // credential check
   await authDatasetCollection({
     req,
     authToken: true,
     authApiKey: true,
     collectionId: id,
-    per: 'w'
+    per: WritePermissionVal
   });

   const updateFields: Record<string, any> = {
     ...(parentId !== undefined && { parentId: parentId || null }),
     ...(name && { name, updateTime: getCollectionUpdateTime({ name }) })
   };

   await MongoDatasetCollection.findByIdAndUpdate(id, {
     $set: updateFields
   });
-
-  jsonRes(res);
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
 }

 export default NextAPI(handler);

View File

@@ -1,6 +1,4 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
 import type { CreateDatasetParams } from '@/global/core/dataset/api.d';
 import { createDefaultCollection } from '@fastgpt/service/core/dataset/collection/controller';
@@ -8,62 +6,59 @@ import { authUserPer } from '@fastgpt/service/support/permission/user/auth';
 import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
 import { getLLMModel, getVectorModel, getDatasetModel } from '@fastgpt/service/core/ai/model';
 import { checkTeamDatasetLimit } from '@fastgpt/service/support/permission/teamLimit';
-import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
+import { NullPermission, WritePermissionVal } from '@fastgpt/global/support/permission/constant';
 import { NextAPI } from '@/service/middleware/entry';

-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
+async function handler(req: NextApiRequest) {
   const {
     parentId,
     name,
     type = DatasetTypeEnum.dataset,
     avatar,
     vectorModel = global.vectorModels[0].model,
-    agentModel = getDatasetModel().model
+    agentModel = getDatasetModel().model,
+    defaultPermission = NullPermission
   } = req.body as CreateDatasetParams;

   // auth
   const { teamId, tmbId } = await authUserPer({
     req,
     authToken: true,
     authApiKey: true,
     per: WritePermissionVal
   });

   // check model valid
   const vectorModelStore = getVectorModel(vectorModel);
   const agentModelStore = getLLMModel(agentModel);
   if (!vectorModelStore || !agentModelStore) {
-    throw new Error('vectorModel or qaModel is invalid');
+    throw new Error('vectorModel or qaModel is invalid'); // TODO: use enum code
   }

   // check limit
   await checkTeamDatasetLimit(teamId);

   const { _id } = await MongoDataset.create({
     name,
     teamId,
     tmbId,
     vectorModel,
     agentModel,
     avatar,
     parentId: parentId || null,
-    type
+    type,
+    defaultPermission
   });

   if (type === DatasetTypeEnum.dataset) {
     await createDefaultCollection({
       datasetId: _id,
       teamId,
       tmbId
     });
   }

-    jsonRes(res, { data: _id });
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
+  return _id;
 }

 export default NextAPI(handler);

View File

@@ -1,32 +1,31 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { authDatasetData } from '@/service/support/permission/auth/dataset';
+import type { NextApiRequest } from 'next';
+import { authDatasetData } from '@fastgpt/service/support/permission/dataset/auth';
 import { deleteDatasetData } from '@/service/core/dataset/data/controller';
 import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
+import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';

-async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
+async function handler(req: NextApiRequest) {
   const { id: dataId } = req.query as {
     id: string;
   };

   if (!dataId) {
-    throw new Error('dataId is required');
+    Promise.reject(CommonErrEnum.missingParams);
   }

   // credential check
-  const { teamId, datasetData } = await authDatasetData({
+  const { datasetData } = await authDatasetData({
     req,
     authToken: true,
     authApiKey: true,
     dataId,
-    per: 'w'
+    per: WritePermissionVal
   });

   await deleteDatasetData(datasetData);

-  jsonRes(res, {
-    data: 'success'
-  });
+  return 'success';
 }

 export default NextAPI(handler);

View File

@@ -1,8 +1,7 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
-import { authDatasetData } from '@/service/support/permission/auth/dataset';
+import type { NextApiRequest } from 'next';
 import { NextAPI } from '@/service/middleware/entry';
+import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
+import { authDatasetData } from '@fastgpt/service/support/permission/dataset/auth';

 export type Response = {
   id: string;
@@ -11,7 +10,7 @@ export type Response = {
   source: string;
 };

-async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
+async function handler(req: NextApiRequest) {
   const { id: dataId } = req.query as {
     id: string;
   };
@@ -22,12 +21,10 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
     authToken: true,
     authApiKey: true,
     dataId,
-    per: 'r'
+    per: ReadPermissionVal
   });

-  jsonRes(res, {
-    data: datasetData
-  });
+  return datasetData;
 }

 export default NextAPI(handler);

View File

@@ -2,30 +2,30 @@
   insert one data to dataset (immediately insert)
   manual input or mark data
 */
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import { countPromptTokens } from '@fastgpt/service/common/string/tiktoken/index';
 import { getVectorModel } from '@fastgpt/service/core/ai/model';
 import { hasSameValue } from '@/service/core/dataset/data/utils';
 import { insertData2Dataset } from '@/service/core/dataset/data/controller';
-import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
 import { getCollectionWithDataset } from '@fastgpt/service/core/dataset/controller';
 import { pushGenerateVectorUsage } from '@/service/support/wallet/usage/push';
 import { InsertOneDatasetDataProps } from '@/global/core/dataset/api';
 import { simpleText } from '@fastgpt/global/common/string/tools';
 import { checkDatasetLimit } from '@fastgpt/service/support/permission/teamLimit';
 import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
+import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';

-async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
+async function handler(req: NextApiRequest) {
   const { collectionId, q, a, indexes } = req.body as InsertOneDatasetDataProps;

   if (!q) {
-    throw new Error('q is required');
+    Promise.reject(CommonErrEnum.missingParams);
   }

   if (!collectionId) {
-    throw new Error('collectionId is required');
+    Promise.reject(CommonErrEnum.missingParams);
   }

   // credential check
@@ -34,7 +34,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
     authToken: true,
     authApiKey: true,
     collectionId,
-    per: 'w'
+    per: WritePermissionVal
   });

   await checkDatasetLimit({
@@ -93,9 +93,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
     model: vectorModelData.model
   });

-  jsonRes<string>(res, {
-    data: insertId
-  });
+  return insertId;
 }

 export default NextAPI(handler);

View File

@@ -1,15 +1,12 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
-import type { DatasetDataListItemType } from '@/global/core/dataset/type.d';
+import type { NextApiRequest } from 'next';
 import type { GetDatasetDataListProps } from '@/global/core/api/datasetReq';
-import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
 import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
-import { PagingData } from '@/types';
 import { replaceRegChars } from '@fastgpt/global/common/string/tools';
 import { NextAPI } from '@/service/middleware/entry';
+import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';

-async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
+async function handler(req: NextApiRequest) {
   let {
     pageNum = 1,
     pageSize = 10,
@@ -25,7 +22,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
     authToken: true,
     authApiKey: true,
     collectionId,
-    per: 'r'
+    per: ReadPermissionVal
   });

   searchText = replaceRegChars(searchText).replace(/'/g, '');
@@ -50,14 +47,12 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
     MongoDatasetData.countDocuments(match)
   ]);

-  jsonRes<PagingData<DatasetDataListItemType>>(res, {
-    data: {
-      pageNum,
-      pageSize,
-      data,
-      total
-    }
-  });
+  return {
+    pageNum,
+    pageSize,
+    data,
+    total
+  };
 }

 export default NextAPI(handler);

View File

@@ -5,11 +5,12 @@ import type {
   PushDatasetDataProps,
   PushDatasetDataResponse
 } from '@fastgpt/global/core/dataset/api.d';
-import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
+import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';
 import { checkDatasetLimit } from '@fastgpt/service/support/permission/teamLimit';
 import { predictDataLimitLength } from '@fastgpt/global/core/dataset/utils';
 import { pushDataListToTrainingQueue } from '@fastgpt/service/core/dataset/training/controller';
 import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';

 async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
   const body = req.body as PushDatasetDataProps;
@@ -29,7 +30,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
     authToken: true,
     authApiKey: true,
     collectionId,
-    per: 'w'
+    per: WritePermissionVal
   });

   // auth dataset limit
View File

@@ -1,14 +1,13 @@
-import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
+import type { NextApiRequest } from 'next';
 import { updateData2Dataset } from '@/service/core/dataset/data/controller';
-import { authDatasetData } from '@/service/support/permission/auth/dataset';
 import { pushGenerateVectorUsage } from '@/service/support/wallet/usage/push';
 import { UpdateDatasetDataProps } from '@/global/core/dataset/api';
 import { checkDatasetLimit } from '@fastgpt/service/support/permission/teamLimit';
 import { NextAPI } from '@/service/middleware/entry';
+import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
+import { authDatasetData } from '@fastgpt/service/support/permission/dataset/auth';

-async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
+async function handler(req: NextApiRequest) {
   const { id, q = '', a, indexes = [] } = req.body as UpdateDatasetDataProps;

   // auth data permission
@@ -23,7 +22,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
     authToken: true,
     authApiKey: true,
     dataId: id,
-    per: 'w'
+    per: WritePermissionVal
   });

   // auth team balance
@@ -46,8 +45,6 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
     tokens,
     model: vectorModel
   });
-
-  jsonRes(res);
 }

 export default NextAPI(handler);

View File

@@ -1,54 +1,47 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import type { NextApiRequest } from 'next';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { delDatasetRelevantData } from '@fastgpt/service/core/dataset/controller';
import { findDatasetAndAllChildren } from '@fastgpt/service/core/dataset/controller';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
import { NextAPI } from '@/service/middleware/entry';
import { OwnerPermissionVal } from '@fastgpt/global/support/permission/constant';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
const { id: datasetId } = req.query as {
id: string;
};
async function handler(req: NextApiRequest) {
const { id: datasetId } = req.query as {
id: string;
};
if (!datasetId) {
throw new Error('Missing parameters');
}
// auth owner
const { teamId } = await authDataset({
req,
authToken: true,
authApiKey: true,
datasetId,
per: 'owner'
});
const datasets = await findDatasetAndAllChildren({
teamId,
datasetId
});
// delete all dataset.data and pg data
await mongoSessionRun(async (session) => {
// delete dataset data
await delDatasetRelevantData({ datasets, session });
await MongoDataset.deleteMany(
{
_id: { $in: datasets.map((d) => d._id) }
},
{ session }
);
});
jsonRes(res);
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
if (!datasetId) {
return Promise.reject(CommonErrEnum.missingParams);
}
// auth owner
const { teamId } = await authDataset({
req,
authToken: true,
authApiKey: true,
datasetId,
per: OwnerPermissionVal
});
const datasets = await findDatasetAndAllChildren({
teamId,
datasetId
});
// delete all dataset.data and pg data
await mongoSessionRun(async (session) => {
// delete dataset data
await delDatasetRelevantData({ datasets, session });
await MongoDataset.deleteMany(
{
_id: { $in: datasets.map((d) => d._id) }
},
{ session }
);
});
}
export default NextAPI(handler);

View File

@@ -1,43 +1,39 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
import type { DatasetItemType } from '@fastgpt/global/core/dataset/type.d';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { NextAPI } from '@/service/middleware/entry';
import { DatasetItemType } from '@fastgpt/global/core/dataset/type';
import { ApiRequestProps } from '@fastgpt/service/type/next';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
const { id: datasetId } = req.query as {
id: string;
};
type Query = {
id: string;
};
if (!datasetId) {
throw new Error('Missing parameters');
}
async function handler(req: ApiRequestProps<Query>): Promise<DatasetItemType> {
const { id: datasetId } = req.query as {
id: string;
};
// credential check
const { dataset, canWrite, isOwner } = await authDataset({
req,
authToken: true,
authApiKey: true,
datasetId,
per: 'r'
});
jsonRes<DatasetItemType>(res, {
data: {
...dataset,
vectorModel: getVectorModel(dataset.vectorModel),
agentModel: getLLMModel(dataset.agentModel),
canWrite,
isOwner
}
});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
if (!datasetId) {
return Promise.reject(CommonErrEnum.missingParams);
}
// credential check
const { dataset, permission } = await authDataset({
req,
authToken: true,
authApiKey: true,
datasetId,
per: ReadPermissionVal
});
return {
...dataset,
permission,
vectorModel: getVectorModel(dataset.vectorModel),
agentModel: getLLMModel(dataset.agentModel)
};
}
export default NextAPI(handler);
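The detail response now exposes a full permission object in place of the old canWrite/isOwner booleans, so clients can query hasReadPer, hasWritePer, hasManagePer and isOwner uniformly. The DatasetPermission class is defined outside this diff; what follows is a rough sketch, assuming bit-flag semantics consistent with the Read/Write/Owner permission values imported above — the concrete bit values are assumptions.

// Illustrative bit values; the real constants live in
// '@fastgpt/global/support/permission/constant' and may differ.
const ReadPerVal = 0b100;
const WritePerVal = 0b110; // write implies read
const ManagePerVal = 0b111; // manage implies write and read

class DatasetPermissionSketch {
  per: number;
  isOwner: boolean;

  constructor({ per, isOwner }: { per: number; isOwner: boolean }) {
    this.isOwner = isOwner;
    this.per = isOwner ? ManagePerVal : per; // owners implicitly hold every bit
  }
  get hasReadPer() {
    return (this.per & ReadPerVal) === ReadPerVal;
  }
  get hasWritePer() {
    return (this.per & WritePerVal) === WritePerVal;
  }
  get hasManagePer() {
    return (this.per & ManagePerVal) === ManagePerVal;
  }
}

// A collaborator granted write access can edit data but not manage collaborators:
const p = new DatasetPermissionSketch({ per: WritePerVal, isOwner: false });
// p.hasReadPer === true, p.hasWritePer === true, p.hasManagePer === false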

View File

@@ -1,7 +1,7 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { responseWriteController } from '@fastgpt/service/common/response';
import { addLog } from '@fastgpt/service/common/system/log';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
import { findDatasetAndAllChildren } from '@fastgpt/service/core/dataset/controller';
import {
@@ -9,6 +9,8 @@ import {
updateExportDatasetLimit
} from '@fastgpt/service/support/user/utils';
import { NextAPI } from '@/service/middleware/entry';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
let { datasetId } = req.query as {
@@ -16,11 +18,16 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
};
if (!datasetId || !global.pgClient) {
throw new Error('Missing parameters');
return Promise.reject(CommonErrEnum.missingParams);
}
// credential check
const { teamId } = await authDataset({ req, authToken: true, datasetId, per: 'w' });
const { teamId } = await authDataset({
req,
authToken: true,
datasetId,
per: WritePermissionVal
});
await checkExportDatasetLimit({
teamId,

View File

@@ -1,24 +1,14 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authDatasetFile } from '@fastgpt/service/support/permission/auth/dataset';
import { DatasetFileSchema } from '@fastgpt/global/core/dataset/type.d';
import type { NextApiRequest } from 'next';
import { authDatasetFile } from '@fastgpt/service/support/permission/dataset/auth';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { NextAPI } from '@/service/middleware/entry';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
async function handler(req: NextApiRequest) {
const { fileId } = req.query as { fileId: string };
// credential check
const { file } = await authDatasetFile({ req, authToken: true, fileId, per: ReadPermissionVal });
const { fileId } = req.query as { fileId: string };
// credential check
const { file } = await authDatasetFile({ req, authToken: true, fileId, per: 'r' });
jsonRes<DatasetFileSchema>(res, {
data: file
});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
return file;
}
export default NextAPI(handler);

View File

@@ -1,10 +1,10 @@
import type { NextApiResponse } from 'next';
import { authFile } from '@fastgpt/service/support/permission/auth/file';
import { authDatasetFile } from '@fastgpt/service/support/permission/dataset/auth';
import { DatasetSourceReadTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { rawText2Chunks, readDatasetSourceRawText } from '@fastgpt/service/core/dataset/read';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { NextAPI } from '@/service/middleware/entry';
import { ApiRequestProps } from '@fastgpt/service/type/next';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
export type PostPreviewFilesChunksProps = {
type: DatasetSourceReadTypeEnum;
@@ -21,8 +21,7 @@ export type PreviewChunksResponse = {
}[];
async function handler(
req: ApiRequestProps<PostPreviewFilesChunksProps>,
res: NextApiResponse<any>
req: ApiRequestProps<PostPreviewFilesChunksProps>
): Promise<PreviewChunksResponse> {
const { type, sourceId, chunkSize, customSplitChar, overlapRatio, selector, isQAImport } =
req.body;
@@ -36,7 +35,13 @@ async function handler(
const { teamId } = await (async () => {
if (type === DatasetSourceReadTypeEnum.fileLocal) {
return authFile({ req, authToken: true, authApiKey: true, fileId: sourceId });
return authDatasetFile({
req,
authToken: true,
authApiKey: true,
fileId: sourceId,
per: ReadPermissionVal
});
}
return authCert({ req, authApiKey: true, authToken: true });
})();

View File

@@ -1,34 +1,68 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import type { NextApiRequest } from 'next';
import type { DatasetListItemType } from '@fastgpt/global/core/dataset/type.d';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import { mongoRPermission } from '@fastgpt/global/support/permission/utils';
import { authUserPer } from '@fastgpt/service/support/permission/user/auth';
import { getVectorModel } from '@fastgpt/service/core/ai/model';
import { NextAPI } from '@/service/middleware/entry';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { DatasetPermission } from '@fastgpt/global/support/permission/dataset/controller';
import {
PerResourceTypeEnum,
ReadPermissionVal
} from '@fastgpt/global/support/permission/constant';
import { MongoResourcePermission } from '@fastgpt/service/support/permission/schema';
import { parseParentIdInMongo } from '@fastgpt/global/common/parentFolder/utils';
async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
async function handler(req: NextApiRequest) {
const { parentId, type } = req.query as { parentId?: string; type?: DatasetTypeEnum };
// credential check
const { teamId, tmbId, permission } = await authUserPer({
const {
teamId,
tmbId,
permission: tmbPer
} = await authUserPer({
req,
authToken: true,
authApiKey: true,
per: ReadPermissionVal
});
const datasets = await MongoDataset.find({
...mongoRPermission({ teamId, tmbId, permission }),
...(parentId !== undefined && { parentId: parentId || null }),
...(type && { type })
})
.sort({ updateTime: -1 })
.lean();
const [myDatasets, rpList] = await Promise.all([
MongoDataset.find({
teamId,
...parseParentIdInMongo(parentId),
...(type && { type })
})
.sort({
updateTime: -1
})
.lean(),
MongoResourcePermission.find({
resourceType: PerResourceTypeEnum.dataset,
teamId,
tmbId
}).lean()
]);
const filterDatasets = myDatasets
.map((dataset) => {
const perVal = rpList.find(
(item) => String(item.resourceId) === String(dataset._id)
)?.permission;
const Per = new DatasetPermission({
per: perVal ?? dataset.defaultPermission,
isOwner: String(dataset.tmbId) === tmbId || tmbPer.isOwner
});
return {
...dataset,
permission: Per
};
})
.filter((app) => app.permission.hasReadPer);
const data = await Promise.all(
datasets.map<DatasetListItemType>((item) => ({
filterDatasets.map<DatasetListItemType>((item) => ({
_id: item._id,
parentId: item.parentId,
avatar: item.avatar,
@@ -36,15 +70,12 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
intro: item.intro,
type: item.type,
permission: item.permission,
canWrite: permission.hasWritePer,
isOwner: permission.isOwner || String(item.tmbId) === tmbId,
vectorModel: getVectorModel(item.vectorModel)
vectorModel: getVectorModel(item.vectorModel),
defaultPermission: item.defaultPermission
}))
);
jsonRes<DatasetListItemType[]>(res, {
data
});
return data;
}
export default NextAPI(handler);
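The list handler resolves each dataset's effective permission by overlaying per-member ResourcePermission records on the dataset's defaultPermission, with ownership trumping both. The same rule, pulled out as a standalone helper for clarity (a sketch only; the production code inlines this inside the .map above):

type RpRecord = { resourceId: string; permission: number };
type DatasetDoc = { _id: string; tmbId: string; defaultPermission: number };

function resolveDatasetPer(
  dataset: DatasetDoc,
  rpList: RpRecord[],
  tmbId: string,
  isTeamOwner: boolean
) {
  // 1. An explicit collaborator grant on this dataset wins.
  const explicit = rpList.find(
    (rp) => String(rp.resourceId) === String(dataset._id)
  )?.permission;
  return {
    // 2. Otherwise fall back to the dataset-level default permission.
    per: explicit ?? dataset.defaultPermission,
    // 3. The dataset's creator and the team owner are always owners.
    isOwner: String(dataset.tmbId) === tmbId || isTeamOwner
  };
}

Datasets whose resolved permission lacks the read bit are then filtered out before the response list is built.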

View File

@@ -1,33 +1,20 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { NextApiRequest } from 'next';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import type { ParentTreePathItemType } from '@fastgpt/global/common/parentFolder/type.d';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { NextAPI } from '@/service/middleware/entry';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
async function handler(req: NextApiRequest) {
const { parentId } = req.query as { parentId: string };
const { parentId } = req.query as { parentId: string };
if (!parentId) {
return jsonRes(res, {
data: []
});
}
await authDataset({ req, authToken: true, datasetId: parentId, per: 'r' });
jsonRes<ParentTreePathItemType[]>(res, {
data: await getParents(parentId)
});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
if (!parentId) {
return [];
}
await authDataset({ req, authToken: true, datasetId: parentId, per: ReadPermissionVal });
return await getParents(parentId);
}
async function getParents(parentId?: string): Promise<ParentTreePathItemType[]> {
@@ -44,3 +31,5 @@ async function getParents(parentId?: string): Promise<ParentTreePathItemType[]>
return paths;
}
export default NextAPI(handler);

View File

@@ -1,7 +1,6 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import type { SearchTestProps, SearchTestResponse } from '@/global/core/dataset/api.d';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import type { NextApiRequest } from 'next';
import type { SearchTestProps } from '@/global/core/dataset/api.d';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { pushGenerateVectorUsage } from '@/service/support/wallet/usage/push';
import { searchDatasetData } from '@fastgpt/service/core/dataset/search/controller';
import { updateApiKeyUsage } from '@fastgpt/service/support/openapi/tools';
@@ -13,8 +12,10 @@ import {
checkTeamReRankPermission
} from '@fastgpt/service/support/permission/teamLimit';
import { NextAPI } from '@/service/middleware/entry';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
async function handler(req: NextApiRequest) {
const {
datasetId,
text,
@@ -29,8 +30,9 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
} = req.body as SearchTestProps;
if (!datasetId || !text) {
throw new Error('Missing parameters');
return Promise.reject(CommonErrEnum.missingParams);
}
const start = Date.now();
// auth dataset role
@@ -39,7 +41,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
authToken: true,
authApiKey: true,
datasetId,
per: 'r'
per: ReadPermissionVal
});
// auth balance
await checkTeamAIPoints(teamId);
@@ -88,14 +90,12 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
});
}
jsonRes<SearchTestResponse>(res, {
data: {
list: searchRes,
duration: `${((Date.now() - start) / 1000).toFixed(3)}s`,
queryExtensionModel: aiExtensionResult?.model,
...result
}
});
return {
list: searchRes,
duration: `${((Date.now() - start) / 1000).toFixed(3)}s`,
queryExtensionModel: aiExtensionResult?.model,
...result
};
}
export default NextAPI(handler);

View File

@@ -1,10 +1,9 @@
import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
import type { ApiRequestProps } from '@fastgpt/service/type/next';
import { NextAPI } from '@/service/middleware/entry';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/schema';
type Props = {};
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
export type getDatasetTrainingQueueResponse = {
rebuildingCount: number;
@@ -12,8 +11,7 @@ export type getDatasetTrainingQueueResponse = {
};
async function handler(
req: ApiRequestProps<any, { datasetId: string }>,
res: ApiResponseType<any>
req: ApiRequestProps<any, { datasetId: string }>
): Promise<getDatasetTrainingQueueResponse> {
const { datasetId } = req.query;
@@ -22,7 +20,7 @@ async function handler(
authToken: true,
authApiKey: true,
datasetId,
per: 'r'
per: ReadPermissionVal
});
const [rebuildingCount, trainingCount] = await Promise.all([

View File

@@ -1,46 +1,37 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { NextApiRequest } from 'next';
import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/schema';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { GetTrainingQueueProps } from '@/global/core/dataset/api';
import { NextAPI } from '@/service/middleware/entry';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
await authCert({ req, authToken: true });
const { vectorModel, agentModel } = req.query as GetTrainingQueueProps;
async function handler(req: NextApiRequest) {
await authCert({ req, authToken: true });
const { vectorModel, agentModel } = req.query as GetTrainingQueueProps;
// get queue data
// count documents for model = vectorModel and model = agentModel separately
const data = await MongoDatasetTraining.aggregate([
{
$match: {
lockTime: { $lt: new Date('2040/1/1') },
$or: [{ model: { $eq: vectorModel } }, { model: { $eq: agentModel } }]
}
},
{
$group: {
_id: '$model',
count: { $sum: 1 }
}
// get queue data
// count documents for model = vectorModel and model = agentModel separately
const data = await MongoDatasetTraining.aggregate([
{
$match: {
lockTime: { $lt: new Date('2040/1/1') },
$or: [{ model: { $eq: vectorModel } }, { model: { $eq: agentModel } }]
}
]);
const vectorTrainingCount = data.find((item) => item._id === vectorModel)?.count || 0;
const agentTrainingCount = data.find((item) => item._id === agentModel)?.count || 0;
jsonRes(res, {
data: {
vectorTrainingCount,
agentTrainingCount
},
{
$group: {
_id: '$model',
count: { $sum: 1 }
}
});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
}
]);
const vectorTrainingCount = data.find((item) => item._id === vectorModel)?.count || 0;
const agentTrainingCount = data.find((item) => item._id === agentModel)?.count || 0;
return {
vectorTrainingCount,
agentTrainingCount
};
}
export default NextAPI(handler);

View File

@@ -1,5 +1,5 @@
import { NextAPI } from '@/service/middleware/entry';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
@@ -8,7 +8,8 @@ import { createTrainingUsage } from '@fastgpt/service/support/wallet/usage/contr
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
import { TrainingModeEnum } from '@fastgpt/global/core/dataset/constants';
import { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
import { ApiRequestProps } from '@fastgpt/service/type/next';
import { OwnerPermissionVal } from '@fastgpt/global/support/permission/constant';
export type rebuildEmbeddingBody = {
datasetId: string;
@@ -17,10 +18,7 @@ export type rebuildEmbeddingBody = {
export type Response = {};
async function handler(
req: ApiRequestProps<rebuildEmbeddingBody>,
res: ApiResponseType<any>
): Promise<Response> {
async function handler(req: ApiRequestProps<rebuildEmbeddingBody>): Promise<Response> {
const { datasetId, vectorModel } = req.body;
const { teamId, tmbId, dataset } = await authDataset({
@@ -28,7 +26,7 @@ async function handler(
authToken: true,
authApiKey: true,
datasetId,
per: 'owner'
per: OwnerPermissionVal
});
// check vector model

View File

@@ -1,58 +1,56 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { NextApiRequest } from 'next';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import type { DatasetUpdateBody } from '@fastgpt/global/core/dataset/api.d';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { NextAPI } from '@/service/middleware/entry';
import {
OwnerPermissionVal,
WritePermissionVal
} from '@fastgpt/global/support/permission/constant';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
const {
id,
parentId,
name,
avatar,
intro,
permission,
agentModel,
websiteConfig,
externalReadUrl,
status
} = req.body as DatasetUpdateBody;
async function handler(req: NextApiRequest) {
const {
id,
parentId,
name,
avatar,
intro,
agentModel,
websiteConfig,
externalReadUrl,
defaultPermission,
status
} = req.body as DatasetUpdateBody;
if (!id) {
throw new Error('Missing parameters');
}
if (permission) {
await authDataset({ req, authToken: true, datasetId: id, per: 'owner' });
} else {
await authDataset({ req, authToken: true, datasetId: id, per: 'w' });
}
await MongoDataset.findOneAndUpdate(
{
_id: id
},
{
...(parentId !== undefined && { parentId: parentId || null }),
...(name && { name }),
...(avatar && { avatar }),
...(permission && { permission }),
...(agentModel && { agentModel: agentModel.model }),
...(websiteConfig && { websiteConfig }),
...(status && { status }),
...(intro && { intro }),
...(externalReadUrl && { externalReadUrl })
}
);
jsonRes(res);
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
if (!id) {
return Promise.reject(CommonErrEnum.missingParams);
}
if (defaultPermission) {
await authDataset({ req, authToken: true, datasetId: id, per: OwnerPermissionVal });
} else {
await authDataset({ req, authToken: true, datasetId: id, per: WritePermissionVal });
}
await MongoDataset.findOneAndUpdate(
{
_id: id
},
{
...(parentId !== undefined && { parentId: parentId || null }),
...(name && { name }),
...(avatar && { avatar }),
...(agentModel && { agentModel: agentModel.model }),
...(websiteConfig && { websiteConfig }),
...(status && { status }),
...(intro && { intro }),
...(externalReadUrl && { externalReadUrl }),
defaultPermission
}
);
}
export default NextAPI(handler);
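Note the tiered auth in the update handler: changing defaultPermission requires owner rights, while any other field update only needs write access. Condensed into a helper (a sketch; faithful to the truthiness check above, which means a falsy permission value would take the write branch):

import {
  OwnerPermissionVal,
  WritePermissionVal
} from '@fastgpt/global/support/permission/constant';

// Pick the permission level authDataset must enforce for this update body.
function requiredUpdatePer(body: { defaultPermission?: number }) {
  return body.defaultPermission ? OwnerPermissionVal : WritePermissionVal;
}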

View File

@@ -1,9 +1,10 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import type { NextApiRequest } from 'next';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { checkExportDatasetLimit } from '@fastgpt/service/support/user/utils';
import { NextAPI } from '@/service/middleware/entry';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
async function handler(req: NextApiRequest) {
const { datasetId } = req.query as {
datasetId: string;
};
@@ -13,7 +14,12 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
}
// credential check
const { teamId } = await authDataset({ req, authToken: true, datasetId, per: 'w' });
const { teamId } = await authDataset({
req,
authToken: true,
datasetId,
per: WritePermissionVal
});
await checkExportDatasetLimit({
teamId,

View File

@@ -1,41 +1,33 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
import { CreateTrainingUsageProps } from '@fastgpt/global/support/wallet/usage/api.d';
import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
import { createTrainingUsage } from '@fastgpt/service/support/wallet/usage/controller';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { NextAPI } from '@/service/middleware/entry';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
const { name, datasetId } = req.body as CreateTrainingUsageProps;
async function handler(req: NextApiRequest) {
const { name, datasetId } = req.body as CreateTrainingUsageProps;
const { teamId, tmbId, dataset } = await authDataset({
req,
authToken: true,
authApiKey: true,
datasetId,
per: 'w'
});
const { teamId, tmbId, dataset } = await authDataset({
req,
authToken: true,
authApiKey: true,
datasetId,
per: WritePermissionVal
});
const { billId } = await createTrainingUsage({
teamId,
tmbId,
appName: name,
billSource: UsageSourceEnum.training,
vectorModel: getVectorModel(dataset.vectorModel).name,
agentModel: getLLMModel(dataset.agentModel).name
});
const { billId } = await createTrainingUsage({
teamId,
tmbId,
appName: name,
billSource: UsageSourceEnum.training,
vectorModel: getVectorModel(dataset.vectorModel).name,
agentModel: getLLMModel(dataset.agentModel).name
});
jsonRes<string>(res, {
data: billId
});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
return billId;
}
export default NextAPI(handler);

View File

@@ -14,7 +14,6 @@ import { AppUpdateParams } from '@/global/core/app/api';
import dynamic from 'next/dynamic';
import { useI18n } from '@/web/context/I18n';
import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
const MoveModal = dynamic(() => import('@/components/common/folder/MoveModal'));
type AppListContextType = {

View File

@@ -36,7 +36,7 @@ const EditFolderModal = ({
if (!val) return Promise.resolve('');
return editCallback(val);
},
onSuccess: (res) => {
onSuccess: () => {
onClose();
}
});

View File

@@ -0,0 +1,46 @@
import { Box, Button, Flex } from '@chakra-ui/react';
import React from 'react';
import CollaboratorContextProvider, {
MemberManagerInputPropsType
} from '@/components/support/permission/MemberManager/context';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { useTranslation } from 'next-i18next';
function MemberManager({ managePer }: { managePer: MemberManagerInputPropsType }) {
const { t } = useTranslation();
return (
<Box mt={4}>
<CollaboratorContextProvider {...managePer}>
{({ MemberListCard, onOpenManageModal, onOpenAddMember }) => {
return (
<>
<Flex alignItems="center" flexDirection="row" justifyContent="space-between" w="full">
<Flex flexDirection="row" gap="2">
<Button
size="sm"
variant="whitePrimary"
leftIcon={<MyIcon w="4" name="common/settingLight" />}
onClick={onOpenManageModal}
>
{t('permission.Manage')}
</Button>
<Button
size="sm"
variant="whitePrimary"
leftIcon={<MyIcon w="4" name="support/permission/collaborator" />}
onClick={onOpenAddMember}
>
{t('common.Add')}
</Button>
</Flex>
</Flex>
<MemberListCard mt={2} p={1.5} bg="myGray.100" borderRadius="md" />
</>
);
}}
</CollaboratorContextProvider>
</Box>
);
}
export default MemberManager;

View File

@@ -26,8 +26,6 @@ import EditFolderModal, { useEditFolder } from '../../../component/EditFolderMod
import { TabEnum } from '../../index';
import ParentPath from '@/components/common/ParentPaths';
import dynamic from 'next/dynamic';
import { useUserStore } from '@/web/support/user/useUserStore';
import { TeamMemberRoleEnum } from '@fastgpt/global/support/user/team/constant';
import { ImportDataSourceEnum } from '@fastgpt/global/core/dataset/constants';
import { useContextSelector } from 'use-context-selector';
@@ -40,7 +38,6 @@ const Header = ({}: {}) => {
const { t } = useTranslation();
const theme = useTheme();
const { setLoading } = useSystemStore();
const { userInfo } = useUserStore();
const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail);
const router = useRouter();
@@ -189,7 +186,7 @@ const Header = ({}: {}) => {
)}
{/* diff collection button */}
{userInfo?.team?.role !== TeamMemberRoleEnum.visitor && (
{datasetDetail.permission.hasWritePer && (
<>
{datasetDetail?.type === DatasetTypeEnum.dataset && (
<MyMenu

View File

@@ -37,8 +37,6 @@ import { useDrag } from '@/web/common/hooks/useDrag';
import SelectCollections from '@/web/core/dataset/components/SelectCollections';
import { useToast } from '@fastgpt/web/hooks/useToast';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import { useUserStore } from '@/web/support/user/useUserStore';
import { TeamMemberRoleEnum } from '@fastgpt/global/support/user/team/constant';
import { DatasetCollectionSyncResultEnum } from '@fastgpt/global/core/dataset/constants';
import MyBox from '@fastgpt/web/components/common/MyBox';
import { useContextSelector } from 'use-context-selector';
@@ -53,7 +51,6 @@ const CollectionCard = () => {
const router = useRouter();
const { toast } = useToast();
const { t } = useTranslation();
const { userInfo } = useUserStore();
const { datasetDetail, loadDatasetDetail } = useContextSelector(DatasetPageContext, (v) => v);
const { openConfirm: openDeleteConfirm, ConfirmModal: ConfirmDeleteModal } = useConfirm({
@@ -213,7 +210,7 @@ const CollectionCard = () => {
}
bg={dragTargetId === collection._id ? 'primary.100' : ''}
userSelect={'none'}
onDragStart={(e) => {
onDragStart={() => {
setDragStartId(collection._id);
}}
onDragOver={(e) => {
@@ -296,7 +293,7 @@ const CollectionCard = () => {
</Box>
</Td>
<Td onClick={(e) => e.stopPropagation()}>
{collection.canWrite && userInfo?.team?.role !== TeamMemberRoleEnum.visitor && (
{collection.permission.hasWritePer && (
<MyMenu
width={100}
offset={[-70, 5]}

View File

@@ -35,8 +35,6 @@ import InputDataModal from '../components/InputDataModal';
import RawSourceBox from '@/components/core/dataset/RawSourceBox';
import type { DatasetDataListItemType } from '@/global/core/dataset/type.d';
import { TabEnum } from '..';
import { useUserStore } from '@/web/support/user/useUserStore';
import { TeamMemberRoleEnum } from '@fastgpt/global/support/user/team/constant';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { DatasetCollectionTypeMap, TrainingTypeMap } from '@fastgpt/global/core/dataset/constants';
import { formatTime2YMDHM } from '@fastgpt/global/common/string/time';
@@ -47,18 +45,21 @@ import { usePagination } from '@fastgpt/web/hooks/usePagination';
import { getCollectionSourceData } from '@fastgpt/global/core/dataset/collection/utils';
import { useI18n } from '@/web/context/I18n';
import EmptyTip from '@fastgpt/web/components/common/EmptyTip';
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';
import { useContextSelector } from 'use-context-selector';
const DataCard = () => {
const BoxRef = useRef<HTMLDivElement>(null);
const theme = useTheme();
const lastSearch = useRef('');
const router = useRouter();
const { userInfo } = useUserStore();
const { isPc } = useSystemStore();
const { collectionId = '', datasetId } = router.query as {
collectionId: string;
datasetId: string;
};
const datasetDetail = useContextSelector(DatasetPageContext, (v) => v.datasetDetail);
const { Loading, setIsLoading } = useLoading({ defaultLoading: true });
const { t } = useTranslation();
const { datasetT } = useI18n();
@@ -101,7 +102,7 @@ const DataCard = () => {
getData(1);
lastSearch.current = searchText;
}, 300),
[]
[searchText]
);
// get file info
@@ -119,10 +120,7 @@ const DataCard = () => {
}
);
const canWrite = useMemo(
() => userInfo?.team?.role !== TeamMemberRoleEnum.visitor && !!collection?.canWrite,
[collection?.canWrite, userInfo?.team?.role]
);
const canWrite = useMemo(() => datasetDetail.permission.hasWritePer, [datasetDetail]);
const metadataList = useMemo(() => {
if (!collection) return [];
@@ -291,7 +289,7 @@ const DataCard = () => {
gridTemplateColumns={['1fr', 'repeat(2,1fr)', 'repeat(3,1fr)', 'repeat(4,1fr)']}
gridGap={4}
>
{datasetDataList.map((item, index) => (
{datasetDataList.map((item) => (
<Card
key={item._id}
cursor={'pointer'}

View File

@@ -1,4 +1,4 @@
import React, { useState, useMemo } from 'react';
import React from 'react';
import { useRouter } from 'next/router';
import { Box, Flex, Button, IconButton, Input, Textarea, HStack } from '@chakra-ui/react';
import { DeleteIcon } from '@chakra-ui/icons';
@@ -11,7 +11,6 @@ import type { DatasetItemType } from '@fastgpt/global/core/dataset/type.d';
import Avatar from '@/components/Avatar';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import { useTranslation } from 'next-i18next';
import PermissionRadio from '@/components/support/permission/Radio';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { useRequest } from '@fastgpt/web/hooks/useRequest';
import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants';
@@ -25,10 +24,21 @@ import MyDivider from '@fastgpt/web/components/common/MyDivider/index';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import QuestionTip from '@fastgpt/web/components/common/MyTooltip/QuestionTip';
import FormLabel from '@fastgpt/web/components/common/MyBox/FormLabel';
import DefaultPermissionList from '@/components/support/permission/DefaultPerList';
import {
DatasetDefaultPermission,
DatasetPermissionList
} from '@fastgpt/global/support/permission/dataset/constant';
import MemberManager from '../../component/MemberManager';
import {
getCollaboratorList,
postUpdateDatasetCollaborators,
deleteDatasetCollaborators
} from '@/web/core/dataset/api/collaborator';
const Info = ({ datasetId }: { datasetId: string }) => {
const { t } = useTranslation();
const { datasetT } = useI18n();
const { datasetT, commonT } = useI18n();
const { datasetDetail, loadDatasetDetail, updateDataset, rebuildingCount, trainingCount } =
useContextSelector(DatasetPageContext, (v) => v);
@@ -44,7 +54,7 @@ const Info = ({ datasetId }: { datasetId: string }) => {
const avatar = watch('avatar');
const vectorModel = watch('vectorModel');
const agentModel = watch('agentModel');
const permission = watch('permission');
const defaultPermission = watch('defaultPermission');
const { datasetModelList, vectorModelList } = useSystemStore();
@@ -233,20 +243,46 @@ const Info = ({ datasetId }: { datasetId: string }) => {
<FormLabel flex={['0 0 90px', '0 0 160px']}>{t('common.Intro')}</FormLabel>
<Textarea flex={[1, '0 0 320px']} {...register('intro')} placeholder={t('common.Intro')} />
</Flex>
{datasetDetail.isOwner && (
<Flex mt={5} alignItems={'center'} w={'100%'} flexWrap={'wrap'}>
<FormLabel flex={['0 0 90px', '0 0 160px']} w={0}>
{t('user.Permission')}
</FormLabel>
<Box>
<PermissionRadio
value={permission}
onChange={(e) => {
setValue('permission', e);
}}
{datasetDetail.permission.hasManagePer && (
<>
<Flex mt={5} alignItems={'center'} w={'100%'} flexWrap={'wrap'} maxW="500px">
<FormLabel flex={['0 0 90px', '0 0 160px']} w={0}>
{commonT('permission.Default permission')}
</FormLabel>
<DefaultPermissionList
w="320px"
per={defaultPermission}
defaultPer={DatasetDefaultPermission}
onChange={(v) => setValue('defaultPermission', v)}
/>
</Box>
</Flex>
</Flex>
<Flex mt={5} alignItems={'center'} w={'100%'} flexWrap={'wrap'} maxW="500px">
<FormLabel flex={['0 0 90px', '0 0 160px']} w={0}>
{commonT('permission.Collaborator')}
</FormLabel>
<Box flex={1}>
<MemberManager
managePer={{
permission: datasetDetail.permission,
onGetCollaboratorList: () => getCollaboratorList(datasetId),
permissionList: DatasetPermissionList,
onUpdateCollaborators: (body) =>
postUpdateDatasetCollaborators({
...body,
datasetId
}),
onDelOneCollaborator: (tmbId) =>
deleteDatasetCollaborators({
datasetId,
tmbId
})
}}
/>
</Box>
</Flex>
</>
)}
<Flex mt={5} w={'100%'} alignItems={'flex-end'}>
@@ -259,7 +295,7 @@ const Info = ({ datasetId }: { datasetId: string }) => {
>
{t('common.Save')}
</Button>
{datasetDetail.isOwner && (
{datasetDetail.permission.isOwner && (
<IconButton
isLoading={btnLoading}
icon={<DeleteIcon />}

View File

@@ -365,9 +365,11 @@ const InputDataModal = ({
<Button variant={'whiteBase'} mr={3} onClick={onClose}>
{t('common.Close')}
</Button>
<MyTooltip label={collection.canWrite ? '' : t('dataset.data.Can not edit')}>
<MyTooltip
label={collection.permission.hasWritePer ? '' : t('dataset.data.Can not edit')}
>
<Button
isDisabled={!collection.canWrite}
isDisabled={!collection.permission.hasWritePer}
// @ts-ignore
onClick={handleSubmit(dataId ? onUpdateData : sureImportData)}
>

View File

@@ -1,7 +1,5 @@
import React, { useCallback } from 'react';
import { useTranslation } from 'next-i18next';
import { useDatasetStore } from '@/web/core/dataset/store/dataset';
import { useUserStore } from '@/web/support/user/useUserStore';
import { Box, Flex, IconButton, useTheme, Progress } from '@chakra-ui/react';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import Avatar from '@/components/Avatar';
@@ -29,7 +27,6 @@ const Slider = ({ currentTab }: { currentTab: TabEnum }) => {
const { datasetT } = useI18n();
const router = useRouter();
const query = router.query;
const { userInfo } = useUserStore();
const { isPc } = useSystemStore();
const { datasetDetail, vectorTrainingMap, agentTrainingMap, rebuildingCount } =
useContextSelector(DatasetPageContext, (v) => v);
@@ -41,7 +38,7 @@ const Slider = ({ currentTab }: { currentTab: TabEnum }) => {
icon: 'common/overviewLight'
},
{ label: t('core.dataset.test.Search Test'), id: TabEnum.test, icon: 'kbTest' },
...(userInfo?.team.permission.hasManagePer || datasetDetail.isOwner
...(datasetDetail.permission.hasManagePer
? [{ label: t('common.Config'), id: TabEnum.info, icon: 'common/settingLight' }]
: [])
];

View File

@@ -1,4 +1,4 @@
import React, { useCallback, useState } from 'react';
import React, { useCallback } from 'react';
import { Box, Flex, Button, ModalFooter, ModalBody, Input } from '@chakra-ui/react';
import { useSelectFile } from '@/web/common/file/hooks/useSelectFile';
import { useForm } from 'react-hook-form';
@@ -20,6 +20,7 @@ import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants'
import AIModelSelector from '@/components/Select/AIModelSelector';
import { useI18n } from '@/web/context/I18n';
import QuestionTip from '@fastgpt/web/components/common/MyTooltip/QuestionTip';
import { DatasetDefaultPermission } from '@fastgpt/global/support/permission/dataset/constant';
const CreateModal = ({ onClose, parentId }: { onClose: () => void; parentId?: string }) => {
const { t } = useTranslation();
@@ -38,7 +39,8 @@ const CreateModal = ({ onClose, parentId }: { onClose: () => void; parentId?: st
name: '',
intro: '',
vectorModel: filterNotHiddenVectorModelList[0].model,
agentModel: datasetModelList[0].model
agentModel: datasetModelList[0].model,
defaultPermission: DatasetDefaultPermission
}
});
const avatar = watch('avatar');

View File

@@ -0,0 +1,496 @@
import { useDrag } from '@/web/common/hooks/useDrag';
import { delDatasetById, getDatasetById, putDatasetById } from '@/web/core/dataset/api';
import { useDatasetStore } from '@/web/core/dataset/store/dataset';
import { Box, Flex, Grid } from '@chakra-ui/react';
import { DatasetTypeEnum, DatasetTypeMap } from '@fastgpt/global/core/dataset/constants';
import MyMenu from '@fastgpt/web/components/common/MyMenu';
import MyIcon from '@fastgpt/web/components/common/Icon';
import React, { useMemo, useRef, useState } from 'react';
import { useRouter } from 'next/router';
import PermissionIconText from '@/components/support/permission/IconText';
import DatasetTypeTag from '@/components/core/dataset/DatasetTypeTag';
import Avatar from '@/components/Avatar';
import { useConfirm } from '@fastgpt/web/hooks/useConfirm';
import { useRequest } from '@fastgpt/web/hooks/useRequest';
import { DatasetItemType } from '@fastgpt/global/core/dataset/type';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { checkTeamExportDatasetLimit } from '@/web/support/user/team/api';
import { downloadFetch } from '@/web/common/system/utils';
import { useTranslation } from 'next-i18next';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import dynamic from 'next/dynamic';
import { EditResourceInfoFormType } from '@/components/common/Modal/EditResourceModal';
import { useContextSelector } from 'use-context-selector';
import { DatasetContext } from '../context';
import {
DatasetDefaultPermission,
DatasetPermissionList
} from '@fastgpt/global/support/permission/dataset/constant';
import ConfigPerModal from '@/components/support/permission/ConfigPerModal';
import {
deleteDatasetCollaborators,
getCollaboratorList,
postUpdateDatasetCollaborators
} from '@/web/core/dataset/api/collaborator';
import FolderSlideCard from '@/components/common/folder/SlideCard';
import { useQuery } from '@tanstack/react-query';
import EmptyTip from '@fastgpt/web/components/common/EmptyTip';
const MoveModal = dynamic(() => import('./MoveModal'), { ssr: false });
function List() {
const { setLoading, isPc } = useSystemStore();
const { toast } = useToast();
const { t } = useTranslation();
const { refetch } = useContextSelector(DatasetContext, (v) => v);
const [editPerDatasetIndex, setEditPerDatasetIndex] = useState<number>();
const { myDatasets, loadMyDatasets, setMyDatasets } = useDatasetStore();
const editPerDataset = useMemo(
() => (editPerDatasetIndex !== undefined ? myDatasets[editPerDatasetIndex] : undefined),
[editPerDatasetIndex, myDatasets]
);
const router = useRouter();
const { parentId } = router.query as { parentId: string };
const { data: folderDetail, refetch: refetchFolderDetail } = useQuery(
['folderDetail', parentId, myDatasets],
() => (parentId ? getDatasetById(parentId) : undefined)
);
const { mutate: exportDataset } = useRequest({
mutationFn: async (dataset: DatasetItemType) => {
setLoading(true);
await checkTeamExportDatasetLimit(dataset._id);
await downloadFetch({
url: `/api/core/dataset/exportAll?datasetId=${dataset._id}`,
filename: `${dataset.name}.csv`
});
},
onSuccess() {
toast({
status: 'success',
title: t('core.dataset.Start export')
});
},
onSettled() {
setLoading(false);
},
errorToast: t('dataset.Export Dataset Limit Error')
});
const { mutate: onclickDelDataset } = useRequest({
mutationFn: async (id: string) => {
setLoading(true);
await delDatasetById(id);
return id;
},
onSuccess(id: string) {
setMyDatasets(myDatasets.filter((item) => item._id !== id));
},
onSettled() {
setLoading(false);
},
successToast: t('common.Delete Success'),
errorToast: t('dataset.Delete Dataset Error')
});
const EditResourceModal = dynamic(() => import('@/components/common/Modal/EditResourceModal'));
const [editedDataset, setEditedDataset] = useState<EditResourceInfoFormType>();
const DeleteTipsMap = useRef({
[DatasetTypeEnum.folder]: t('dataset.deleteFolderTips'),
[DatasetTypeEnum.dataset]: t('core.dataset.Delete Confirm'),
[DatasetTypeEnum.websiteDataset]: t('core.dataset.Delete Confirm'),
[DatasetTypeEnum.externalFile]: t('core.dataset.Delete Confirm')
});
const { moveDataId, setMoveDataId, dragStartId, setDragStartId, dragTargetId, setDragTargetId } =
useDrag();
const formatDatasets = useMemo(
() =>
myDatasets.map((item) => {
return {
...item,
label: DatasetTypeMap[item.type]?.label,
icon: DatasetTypeMap[item.type]?.icon
};
}),
[myDatasets]
);
const { openConfirm, ConfirmModal } = useConfirm({
type: 'delete'
});
const onDeleteDataset = (id: string) => {
openConfirm(
() => onclickDelDataset(id),
undefined,
DeleteTipsMap.current[DatasetTypeEnum.dataset]
)();
};
return (
<>
<Flex>
{formatDatasets.length > 0 && (
<Grid
flexGrow={1}
py={5}
gridTemplateColumns={['1fr', 'repeat(2,1fr)', 'repeat(3,1fr)', 'repeat(4,1fr)']}
gridGap={5}
userSelect={'none'}
>
{formatDatasets.map((dataset, index) => (
<MyTooltip
key={dataset._id}
label={
<Flex flexDirection={'column'} alignItems={'center'}>
<Box fontSize={'xs'} color={'myGray.500'}>
{dataset.type === DatasetTypeEnum.folder ? 'Open folder' : 'Open dataset'}
</Box>
</Flex>
}
>
<Box
display={'flex'}
flexDirection={'column'}
py={3}
px={5}
cursor={'pointer'}
borderWidth={1.5}
borderColor={dragTargetId === dataset._id ? 'primary.600' : 'borderColor.low'}
bg={'white'}
borderRadius={'md'}
minH={'130px'}
position={'relative'}
data-drag-id={dataset.type === DatasetTypeEnum.folder ? dataset._id : undefined}
draggable
onDragStart={() => {
setDragStartId(dataset._id);
}}
onDragOver={(e) => {
e.preventDefault();
const targetId = e.currentTarget.getAttribute('data-drag-id');
if (!targetId) return;
setDragTargetId(targetId); // targetId is only set on folders via data-drag-id
}}
onDragLeave={(e) => {
e.preventDefault();
setDragTargetId(undefined);
}}
onDrop={async (e) => {
e.preventDefault();
if (!dragTargetId || !dragStartId || dragTargetId === dragStartId) return;
// update parentId
try {
await putDatasetById({
id: dragStartId,
parentId: dragTargetId
});
refetch();
} catch (error) {}
setDragTargetId(undefined);
}}
_hover={{
borderColor: 'primary.300',
boxShadow: '1.5',
'& .delete': {
display: 'block'
},
'& .more': {
display: 'flex'
}
}}
onClick={() => {
if (dataset.type === DatasetTypeEnum.folder) {
router.push({
pathname: '/dataset/list',
query: {
parentId: dataset._id
}
});
} else {
router.push({
pathname: '/dataset/detail',
query: {
datasetId: dataset._id
}
});
}
}}
>
{dataset.permission.hasWritePer && (
<Box
className="more"
display="none"
position={'absolute'}
top={3}
right={3}
borderRadius={'md'}
_hover={{
color: 'primary.500',
'& .icon': {
bg: 'myGray.100'
}
}}
onClick={(e) => {
e.stopPropagation();
}}
>
<MyMenu
width={120}
Button={
<Box w={'22px'} h={'22px'}>
<MyIcon
className="icon"
name={'more'}
h={'16px'}
w={'16px'}
px={1}
py={1}
borderRadius={'md'}
cursor={'pointer'}
/>
</Box>
}
menuList={[
{
children: [
{
icon: 'edit',
label: 'Edit info',
onClick: () =>
setEditedDataset({
id: dataset._id,
name: dataset.name,
intro: dataset.intro,
avatar: dataset.avatar
})
},
{
icon: 'common/file/move',
label: t('Move'),
onClick: () => setMoveDataId(dataset._id)
},
{
icon: 'export',
label: t('Export'),
onClick: () => {
exportDataset(dataset);
}
},
...(dataset.permission.hasManagePer
? [
{
icon: 'support/team/key',
label: t('permission.Permission'),
onClick: () => setEditPerDatasetIndex(index)
}
]
: [])
]
},
...(dataset.permission.hasManagePer
? [
{
children: [
{
icon: 'delete',
label: t('common.Delete'),
type: 'danger' as 'danger',
onClick: () => {
openConfirm(
() => onclickDelDataset(dataset._id),
undefined,
DeleteTipsMap.current[dataset.type]
)();
}
}
]
}
]
: [])
]}
/>
</Box>
)}
<Flex alignItems={'center'} h={'38px'}>
<Avatar src={dataset.avatar} borderRadius={'md'} w={'28px'} />
<Box mx={3} className="textEllipsis3">
{dataset.name}
</Box>
</Flex>
<Box
flex={1}
className={'textEllipsis3'}
py={1}
wordBreak={'break-all'}
fontSize={'xs'}
color={'myGray.500'}
>
{dataset.intro ||
(dataset.type === DatasetTypeEnum.folder
? t('core.dataset.Folder placeholder')
: t('core.dataset.Intro Placeholder'))}
</Box>
<Flex alignItems={'center'} fontSize={'sm'}>
<Box flex={1}>
<PermissionIconText
defaultPermission={dataset.defaultPermission}
color={'myGray.600'}
/>
</Box>
{dataset.type !== DatasetTypeEnum.folder && (
<DatasetTypeTag type={dataset.type} py={1} px={2} />
)}
</Flex>
</Box>
</MyTooltip>
))}
</Grid>
)}
{myDatasets.length === 0 && (
<EmptyTip pt={'35vh'} text={t('core.dataset.Empty Dataset Tips')} flexGrow="1"></EmptyTip>
)}
{!!folderDetail && isPc && (
<Box pt={[4, 6]} ml={[4, 6]}>
<FolderSlideCard
refreshDeps={[folderDetail._id]}
name={folderDetail.name}
intro={folderDetail.intro}
onEdit={() => {
setEditedDataset({
id: folderDetail._id,
name: folderDetail.name,
intro: folderDetail.intro
});
}}
onMove={() => setMoveDataId(folderDetail._id)}
deleteTip={t('dataset.deleteFolderTips')}
onDelete={() => onDeleteDataset(folderDetail._id)}
defaultPer={{
value: folderDetail.defaultPermission,
defaultValue: DatasetDefaultPermission,
onChange: (e) => {
return putDatasetById({
id: folderDetail._id,
defaultPermission: e
});
}
}}
managePer={{
permission: folderDetail.permission,
onGetCollaboratorList: () => getCollaboratorList(folderDetail._id),
permissionList: DatasetPermissionList,
onUpdateCollaborators: ({
tmbIds,
permission
}: {
tmbIds: string[];
permission: number;
}) => {
return postUpdateDatasetCollaborators({
tmbIds,
permission,
datasetId: folderDetail._id
});
},
onDelOneCollaborator: (tmbId: string) =>
deleteDatasetCollaborators({
datasetId: folderDetail._id,
tmbId
})
}}
/>
</Box>
)}
</Flex>
<ConfirmModal />
{editedDataset && (
<EditResourceModal
{...editedDataset}
title={''}
onClose={() => setEditedDataset(undefined)}
onEdit={async (data) => {
await putDatasetById({
id: editedDataset.id,
name: data.name,
intro: data.intro,
avatar: data.avatar
});
loadMyDatasets(parentId);
refetchFolderDetail();
setEditedDataset(undefined);
}}
/>
)}
{!!moveDataId && (
<MoveModal
moveDataId={moveDataId}
onClose={() => setMoveDataId('')}
onSuccess={() => {
refetch();
refetchFolderDetail();
setMoveDataId('');
}}
/>
)}
{!!editPerDataset && (
<ConfigPerModal
avatar={editPerDataset.avatar}
name={editPerDataset.name}
defaultPer={{
value: editPerDataset.defaultPermission,
defaultValue: DatasetDefaultPermission,
onChange: async (e) => {
await putDatasetById({
id: editPerDataset._id,
defaultPermission: e
});
refetch();
}
}}
managePer={{
permission: editPerDataset.permission,
onGetCollaboratorList: () => getCollaboratorList(editPerDataset._id),
permissionList: DatasetPermissionList,
onUpdateCollaborators: ({
tmbIds,
permission
}: {
tmbIds: string[];
permission: number;
}) => {
return postUpdateDatasetCollaborators({
tmbIds,
permission,
datasetId: editPerDataset._id
});
},
onDelOneCollaborator: (tmbId: string) =>
deleteDatasetCollaborators({
datasetId: editPerDataset._id,
tmbId
})
}}
onClose={() => setEditPerDatasetIndex(undefined)}
/>
)}
</>
);
}
export default List;

View File

@@ -0,0 +1,58 @@
import { getDatasetPaths } from '@/web/core/dataset/api';
import { useDatasetStore } from '@/web/core/dataset/store/dataset';
import { getErrText } from '@fastgpt/global/common/error/utils';
import { ParentTreePathItemType } from '@fastgpt/global/common/parentFolder/type';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { useQuery } from '@tanstack/react-query';
import { useTranslation } from 'next-i18next';
import { useRouter } from 'next/router';
import React from 'react';
import { createContext } from 'use-context-selector';
export type DatasetContextType = {
refetch: () => void;
isFetching: boolean;
paths: ParentTreePathItemType[];
};
export const DatasetContext = createContext<DatasetContextType>({
refetch: () => {},
isFetching: false,
paths: []
});
function DatasetContextProvider({ children }: { children: React.ReactNode }) {
const router = useRouter();
const { toast } = useToast();
const { t } = useTranslation();
const { parentId } = router.query as { parentId: string };
const { loadMyDatasets } = useDatasetStore();
const { data, refetch, isFetching } = useQuery(
['loadDataset', parentId],
() => {
return Promise.all([loadMyDatasets(parentId), getDatasetPaths(parentId)]);
},
{
onError(err) {
toast({
status: 'error',
title: t(getErrText(err))
});
}
}
);
const paths = data?.[1] || [];
const contextValue = {
refetch,
isFetching,
paths
};
return <DatasetContext.Provider value={contextValue}>{children}</DatasetContext.Provider>;
}
export default DatasetContextProvider;

View File

@@ -1,144 +1,42 @@
import React, { useMemo, useRef, useState } from 'react';
import { Box, Flex, Grid, useDisclosure, Image, Button } from '@chakra-ui/react';
import React from 'react';
import { Box, Flex, useDisclosure, Image, Button } from '@chakra-ui/react';
import { useRouter } from 'next/router';
import PageContainer from '@/components/PageContainer';
import { useConfirm } from '@fastgpt/web/hooks/useConfirm';
import { AddIcon } from '@chakra-ui/icons';
import { useQuery } from '@tanstack/react-query';
import {
delDatasetById,
getDatasetPaths,
putDatasetById,
postCreateDataset
} from '@/web/core/dataset/api';
import { checkTeamExportDatasetLimit } from '@/web/support/user/team/api';
import { postCreateDataset } from '@/web/core/dataset/api';
import { useTranslation } from 'next-i18next';
import Avatar from '@/components/Avatar';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { serviceSideProps } from '@/web/common/utils/i18n';
import dynamic from 'next/dynamic';
import { DatasetTypeEnum, DatasetTypeMap } from '@fastgpt/global/core/dataset/constants';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { FolderImgUrl, FolderIcon } from '@fastgpt/global/common/file/image/constants';
import MyMenu from '@fastgpt/web/components/common/MyMenu';
import { useRequest } from '@fastgpt/web/hooks/useRequest';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { useEditTitle } from '@/web/common/hooks/useEditTitle';
import EditFolderModal, { useEditFolder } from '../component/EditFolderModal';
import { useDrag } from '@/web/common/hooks/useDrag';
import { useUserStore } from '@/web/support/user/useUserStore';
import PermissionIconText from '@/components/support/permission/IconText';
import { PermissionTypeEnum } from '@fastgpt/global/support/permission/constant';
import { DatasetItemType } from '@fastgpt/global/core/dataset/type';
import ParentPaths from '@/components/common/ParentPaths';
import DatasetTypeTag from '@/components/core/dataset/DatasetTypeTag';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { getErrText } from '@fastgpt/global/common/error/utils';
import { useDatasetStore } from '@/web/core/dataset/store/dataset';
import { downloadFetch } from '@/web/common/system/utils';
import EmptyTip from '@fastgpt/web/components/common/EmptyTip';
import List from './component/List';
import { DatasetContext } from './context';
import DatasetContextProvider from './context';
import { useContextSelector } from 'use-context-selector';
const CreateModal = dynamic(() => import('./component/CreateModal'), { ssr: false });
const MoveModal = dynamic(() => import('./component/MoveModal'), { ssr: false });
const Dataset = () => {
const { t } = useTranslation();
const { toast } = useToast();
const router = useRouter();
const { parentId } = router.query as { parentId: string };
const { setLoading } = useSystemStore();
const { userInfo } = useUserStore();
const { myDatasets, loadMyDatasets, setMyDatasets } = useDatasetStore();
const DeleteTipsMap = useRef({
[DatasetTypeEnum.folder]: t('dataset.deleteFolderTips'),
[DatasetTypeEnum.dataset]: t('core.dataset.Delete Confirm'),
[DatasetTypeEnum.websiteDataset]: t('core.dataset.Delete Confirm'),
[DatasetTypeEnum.externalFile]: t('core.dataset.Delete Confirm')
});
const { openConfirm, ConfirmModal } = useConfirm({
type: 'delete'
});
const { onOpenModal: onOpenTitleModal, EditModal: EditTitleModal } = useEditTitle({
title: t('Rename')
});
const { moveDataId, setMoveDataId, dragStartId, setDragStartId, dragTargetId, setDragTargetId } =
useDrag();
const { myDatasets } = useDatasetStore();
const { parentId } = router.query as { parentId: string };
const {
isOpen: isOpenCreateModal,
onOpen: onOpenCreateModal,
onClose: onCloseCreateModal
} = useDisclosure();
const { editFolderData, setEditFolderData } = useEditFolder();
/* click to delete */
const { mutate: onclickDelDataset } = useRequest({
mutationFn: async (id: string) => {
setLoading(true);
await delDatasetById(id);
return id;
},
onSuccess(id: string) {
setMyDatasets(myDatasets.filter((item) => item._id !== id));
},
onSettled() {
setLoading(false);
},
successToast: t('common.Delete Success'),
errorToast: t('dataset.Delete Dataset Error')
});
// check export limit
const { mutate: exportDataset } = useRequest({
mutationFn: async (dataset: DatasetItemType) => {
setLoading(true);
await checkTeamExportDatasetLimit(dataset._id);
await downloadFetch({
url: `/api/core/dataset/exportAll?datasetId=${dataset._id}`,
filename: `${dataset.name}.csv`
});
},
onSuccess() {
toast({
status: 'success',
title: t('core.dataset.Start export')
});
},
onSettled() {
setLoading(false);
},
errorToast: t('dataset.Export Dataset Limit Error')
});
const { data, refetch, isFetching } = useQuery(
['loadDataset', parentId],
() => {
return Promise.all([loadMyDatasets(parentId), getDatasetPaths(parentId)]);
},
{
onError(err) {
toast({
status: 'error',
title: t(getErrText(err))
});
}
}
);
const paths = data?.[1] || [];
const formatDatasets = useMemo(
() =>
myDatasets.map((item) => {
return {
...item,
label: DatasetTypeMap[item.type]?.label,
icon: DatasetTypeMap[item.type]?.icon
};
}),
[myDatasets]
);
const { paths, refetch, isFetching } = useContextSelector(DatasetContext, (v) => v);
return (
<PageContainer
@@ -148,7 +46,7 @@ const Dataset = () => {
<Flex pt={[4, '30px']} alignItems={'center'} justifyContent={'space-between'}>
{/* url path */}
<ParentPaths
paths={paths.map((path, i) => ({
paths={paths.map((path) => ({
parentId: path.parentId,
parentName: path.parentName
}))}
@@ -208,254 +106,7 @@ const Dataset = () => {
/>
)}
</Flex>
<Grid
py={5}
gridTemplateColumns={['1fr', 'repeat(2,1fr)', 'repeat(3,1fr)', 'repeat(4,1fr)']}
gridGap={5}
userSelect={'none'}
>
{formatDatasets.map((dataset) => (
<Box
display={'flex'}
flexDirection={'column'}
key={dataset._id}
py={3}
px={5}
cursor={'pointer'}
borderWidth={1.5}
borderColor={dragTargetId === dataset._id ? 'primary.600' : 'borderColor.low'}
bg={'white'}
borderRadius={'md'}
minH={'130px'}
position={'relative'}
data-drag-id={dataset.type === DatasetTypeEnum.folder ? dataset._id : undefined}
draggable
onDragStart={(e) => {
setDragStartId(dataset._id);
}}
onDragOver={(e) => {
e.preventDefault();
const targetId = e.currentTarget.getAttribute('data-drag-id');
if (!targetId) return;
// data-drag-id is only set on folder cards above, so any non-empty targetId is a folder
setDragTargetId(targetId);
}}
onDragLeave={(e) => {
e.preventDefault();
setDragTargetId(undefined);
}}
onDrop={async (e) => {
e.preventDefault();
if (!dragTargetId || !dragStartId || dragTargetId === dragStartId) return;
// update parentId
try {
await putDatasetById({
id: dragStartId,
parentId: dragTargetId
});
refetch();
} catch (error) {} // swallow move errors; the drag state is reset below either way
setDragTargetId(undefined);
}}
_hover={{
borderColor: 'primary.300',
boxShadow: '1.5',
'& .delete': {
display: 'block'
}
}}
onClick={() => {
if (dataset.type === DatasetTypeEnum.folder) {
router.push({
pathname: '/dataset/list',
query: {
parentId: dataset._id
}
});
} else {
router.push({
pathname: '/dataset/detail',
query: {
datasetId: dataset._id
}
});
}
}}
>
{userInfo?.team?.permission.hasWritePer && dataset.isOwner && (
<Box
position={'absolute'}
top={3}
right={3}
borderRadius={'md'}
_hover={{
color: 'primary.500',
'& .icon': {
bg: 'myGray.100'
}
}}
onClick={(e) => {
e.stopPropagation();
}}
>
<MyMenu
Button={
<Box w={'22px'} h={'22px'}>
<MyIcon
className="icon"
name={'more'}
h={'16px'}
w={'16px'}
px={1}
py={1}
borderRadius={'md'}
cursor={'pointer'}
/>
</Box>
}
menuList={[
{
children: [
{
label: (
<Flex alignItems={'center'}>
<MyIcon name={'edit'} w={'14px'} mr={2} />
{t('Rename')}
</Flex>
),
onClick: () =>
onOpenTitleModal({
defaultVal: dataset.name,
onSuccess: (val) => {
if (val === dataset.name || !val) return;
putDatasetById({
id: dataset._id,
name: val
});
}
})
},
{
label: (
<Flex alignItems={'center'}>
<MyIcon name={'common/file/move'} w={'14px'} mr={2} />
{t('Move')}
</Flex>
),
onClick: () => setMoveDataId(dataset._id)
},
{
label: (
<Flex alignItems={'center'}>
<MyIcon name={'export'} w={'14px'} mr={2} />
{t('Export')}
</Flex>
),
onClick: () => {
exportDataset(dataset);
}
},
...(dataset.permission === PermissionTypeEnum.private
? [
{
label: (
<Flex alignItems={'center'}>
<MyIcon
name={'support/permission/publicLight'}
w={'14px'}
mr={2}
/>
{t('permission.Set Public')}
</Flex>
),
onClick: () => {
putDatasetById({
id: dataset._id,
permission: PermissionTypeEnum.public
});
}
}
]
: [
{
label: (
<Flex alignItems={'center'}>
<MyIcon
name={'support/permission/privateLight'}
w={'14px'}
mr={2}
/>
{t('permission.Set Private')}
</Flex>
),
onClick: () => {
putDatasetById({
id: dataset._id,
permission: PermissionTypeEnum.private
});
}
}
])
]
},
{
children: [
{
label: (
<Flex alignItems={'center'}>
<MyIcon name={'delete'} w={'14px'} mr={2} />
{t('common.Delete')}
</Flex>
),
type: 'danger',
onClick: () => {
openConfirm(
() => onclickDelDataset(dataset._id),
undefined,
DeleteTipsMap.current[dataset.type]
)();
}
}
]
}
]}
/>
</Box>
)}
<Flex alignItems={'center'} h={'38px'}>
<Avatar src={dataset.avatar} borderRadius={'md'} w={'28px'} />
<Box mx={3} className="textEllipsis3">
{dataset.name}
</Box>
</Flex>
<Box
flex={1}
className={'textEllipsis3'}
py={1}
wordBreak={'break-all'}
fontSize={'xs'}
color={'myGray.500'}
>
{dataset.intro ||
(dataset.type === DatasetTypeEnum.folder
? t('core.dataset.Folder placeholder')
: t('core.dataset.Intro Placeholder'))}
</Box>
<Flex alignItems={'center'} fontSize={'sm'}>
<Box flex={1}>
<PermissionIconText permission={dataset.permission} color={'myGray.600'} />
</Box>
{dataset.type !== DatasetTypeEnum.folder && (
<DatasetTypeTag type={dataset.type} py={1} px={2} />
)}
</Flex>
</Box>
))}
</Grid>
{myDatasets.length === 0 && (
<EmptyTip pt={'35vh'} text={t('core.dataset.Empty Dataset Tips')}></EmptyTip>
)}
<ConfirmModal />
<EditTitleModal />
<List />
{isOpenCreateModal && <CreateModal onClose={onCloseCreateModal} parentId={parentId} />}
{!!editFolderData && (
<EditFolderModal
@@ -477,16 +128,6 @@ const Dataset = () => {
isEdit={false}
/>
)}
{!!moveDataId && (
<MoveModal
moveDataId={moveDataId}
onClose={() => setMoveDataId('')}
onSuccess={() => {
refetch();
setMoveDataId('');
}}
/>
)}
</PageContainer>
);
};
@@ -499,4 +140,12 @@ export async function getServerSideProps(content: any) {
};
}
export default Dataset;
function DatasetContextWrapper() {
return (
<DatasetContextProvider>
<Dataset />
</DatasetContextProvider>
);
}
export default DatasetContextWrapper;
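
The refactor moves the list page's data fetching into a React context consumed via use-context-selector. A minimal sketch of what ./context might export, assuming the provider owns the paths query that previously lived in this component; names beyond DatasetContext/DatasetContextProvider are illustrative:

import React, { ReactNode, useCallback, useState } from 'react';
import { createContext } from 'use-context-selector';

type ParentTreePathItemType = { parentId: string; parentName: string };

type DatasetContextType = {
  paths: ParentTreePathItemType[];
  refetch: () => void;
  isFetching: boolean;
};

export const DatasetContext = createContext<DatasetContextType>({
  paths: [],
  refetch: () => {},
  isFetching: false
});

const DatasetContextProvider = ({ children }: { children: ReactNode }) => {
  // In the real provider these values would come from a query over the dataset tree.
  const [paths] = useState<ParentTreePathItemType[]>([]);
  const [isFetching] = useState(false);
  const refetch = useCallback(() => {}, []);

  return (
    <DatasetContext.Provider value={{ paths, refetch, isFetching }}>
      {children}
    </DatasetContext.Provider>
  );
};

export default DatasetContextProvider;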

View File

@@ -1,44 +0,0 @@
import { DatasetDataItemType } from '@fastgpt/global/core/dataset/type';
import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
import { AuthModeType } from '@fastgpt/service/support/permission/type';
/* data inherits its permission from the parent collection */
export async function authDatasetData({
dataId,
...props
}: AuthModeType & {
dataId: string;
}) {
// get mongo dataset.data
const datasetData = await MongoDatasetData.findById(dataId);
if (!datasetData) {
return Promise.reject('core.dataset.error.Data not found');
}
const result = await authDatasetCollection({
...props,
collectionId: datasetData.collectionId
});
const data: DatasetDataItemType = {
id: String(datasetData._id),
teamId: datasetData.teamId,
q: datasetData.q,
a: datasetData.a,
chunkIndex: datasetData.chunkIndex,
indexes: datasetData.indexes,
datasetId: String(datasetData.datasetId),
collectionId: String(datasetData.collectionId),
sourceName: result.collection.name || '',
sourceId: result.collection?.fileId || result.collection?.rawLink,
isOwner: String(datasetData.tmbId) === result.tmbId,
canWrite: result.canWrite
};
return {
...result,
datasetData: data
};
}
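
This helper is deleted because data-level checks now flow through the bit-based permission model: authorization happens once at the collection level, and the returned permission object replaces the old isOwner/canWrite flags. A hedged sketch of what a caller might look like after the change, assuming authDatasetCollection now accepts a per value and returns a DatasetPermission; the import path and option names follow patterns visible in this commit but are not verified:

import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
// assumed path; the pre-refactor helper lived under support/permission/auth/dataset
import { authDatasetCollection } from '@fastgpt/service/support/permission/dataset/auth';

async function assertCanReadData(props: { req: any; collectionId: string }) {
  const { collection, permission } = await authDatasetCollection({
    ...props,
    authToken: true,
    per: ReadPermissionVal
  });
  // permission is a DatasetPermission instance; hasWritePer replaces the old canWrite flag
  return { collection, canWrite: permission.hasWritePer };
}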

View File

@@ -11,5 +11,5 @@ export const getCollaboratorList = (appId: string) =>
export const postUpdateAppCollaborators = (body: UpdateAppCollaboratorBody) =>
POST('/proApi/core/app/collaborator/update', body);
export const deleteAppCollaborators = ({ ...params }: AppCollaboratorDeleteParams) =>
DELETE('/proApi/core/app/collaborator/delete', { ...params });
export const deleteAppCollaborators = (params: AppCollaboratorDeleteParams) =>
DELETE('/proApi/core/app/collaborator/delete', params);
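
The rewrite drops a redundant copy: destructuring into { ...params } and re-spreading allocates an identical object, so passing params straight through is behaviorally the same for a plain body object.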

View File

@@ -0,0 +1,15 @@
import {
UpdateDatasetCollaboratorBody,
DatasetCollaboratorDeleteParams
} from '@fastgpt/global/core/dataset/collaborator';
import { DELETE, GET, POST } from '@/web/common/api/request';
import { CollaboratorItemType } from '@fastgpt/global/support/permission/collaborator';
export const getCollaboratorList = (datasetId: string) =>
GET<CollaboratorItemType[]>('/proApi/core/dataset/collaborator/list', { datasetId });
export const postUpdateDatasetCollaborators = (body: UpdateDatasetCollaboratorBody) =>
POST('/proApi/core/dataset/collaborator/update', body);
export const deleteDatasetCollaborators = ({ ...params }: DatasetCollaboratorDeleteParams) =>
DELETE('/proApi/core/dataset/collaborator/delete', { ...params });
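
Illustrative usage of the new collaborator client, assuming UpdateDatasetCollaboratorBody carries the dataset id, the member ids, and a permission bit-mask; the exact body shape is defined in @fastgpt/global/core/dataset/collaborator and may differ:

// hypothetical import path for the module above
import {
  getCollaboratorList,
  postUpdateDatasetCollaborators
} from '@/web/core/dataset/api/collaborator';

async function shareDataset(datasetId: string, tmbIds: string[], permission: number) {
  // body fields assumed: { datasetId, tmbIds, permission }
  await postUpdateDatasetCollaborators({ datasetId, tmbIds, permission });
  // re-read the list so the UI reflects the new collaborators
  return getCollaboratorList(datasetId);
}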

View File

@@ -8,6 +8,8 @@ import type {
DatasetCollectionItemType,
DatasetItemType
} from '@fastgpt/global/core/dataset/type.d';
import { DatasetDefaultPermission } from '@fastgpt/global/support/permission/dataset/constant';
import { DatasetPermission } from '@fastgpt/global/support/permission/dataset/controller';
export const defaultDatasetDetail: DatasetItemType = {
_id: '',
@@ -21,11 +23,10 @@ export const defaultDatasetDetail: DatasetItemType = {
name: '',
intro: '',
status: 'active',
permission: 'private',
isOwner: false,
canWrite: false,
permission: new DatasetPermission(),
vectorModel: defaultVectorModels[0],
agentModel: defaultQAModels[0]
agentModel: defaultQAModels[0],
defaultPermission: DatasetDefaultPermission
};
export const defaultCollectionDetail: DatasetCollectionItemType = {
@@ -44,20 +45,21 @@ export const defaultCollectionDetail: DatasetCollectionItemType = {
name: '',
intro: '',
status: 'active',
permission: 'private',
permission: new DatasetPermission(),
vectorModel: defaultVectorModels[0].model,
agentModel: defaultQAModels[0].model
agentModel: defaultQAModels[0].model,
defaultPermission: DatasetDefaultPermission
},
parentId: '',
name: '',
type: DatasetCollectionTypeEnum.file,
updateTime: new Date(),
canWrite: false,
sourceName: '',
sourceId: '',
createTime: new Date(),
trainingType: TrainingModeEnum.chunk,
chunkSize: 0
chunkSize: 0,
permission: new DatasetPermission()
};
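
The string 'private'/'public' permission field gives way to a DatasetPermission value object built on a permission bit-mask. A sketch of the layered read/write/manage scheme such a class typically encodes; the constants here are illustrative assumptions, not the library's verified values:

// illustrative bit layout: each level includes the ones below it
const ReadPerVal = 0b100;
const WritePerVal = 0b110;
const ManagePerVal = 0b111;

class PermissionSketch {
  constructor(private per: number = 0) {}
  get hasReadPer() { return (this.per & ReadPerVal) === ReadPerVal; }
  get hasWritePer() { return (this.per & WritePerVal) === WritePerVal; }
  get hasManagePer() { return (this.per & ManagePerVal) === ManagePerVal; }
}

// a collaborator granted write can read but not manage:
const p = new PermissionSketch(WritePerVal);
// p.hasReadPer === true, p.hasWritePer === true, p.hasManagePer === false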
export enum ImportProcessWayEnum {