Mirror of https://github.com/labring/FastGPT.git, synced 2025-08-02 20:58:12 +00:00
External dataset (#1485)
* fix: revert version
* feat: external collection
* import context
* external ui
* doc
* fix: ts
* clear invalid data
* feat: rename sub name
* fix: node if else edge remove
* fix: init
* api size
* fix: if else node refresh
@@ -58,17 +58,18 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
   try {
     await connectToDatabase();
     await authCert({ req, authRoot: true });
+    const { start = -2, end = -360 * 24 } = req.body as { start: number; end: number };
+
 
     (async () => {
       try {
         console.log('Start dirty data cleanup task');
-        // from 360 days ago to 2 hours ago
-        const end = addHours(new Date(), -2);
-        const start = addHours(new Date(), -360 * 24);
-        await checkInvalidDatasetFiles(start, end);
-        await checkInvalidImg(start, end);
-        await checkInvalidDatasetData(start, end);
-        await checkInvalidVector(start, end);
+        const endTime = addHours(new Date(), start);
+        const startTime = addHours(new Date(), end);
+        await checkInvalidDatasetFiles(startTime, endTime);
+        await checkInvalidImg(startTime, endTime);
+        await checkInvalidDatasetData(startTime, endTime);
+        await checkInvalidVector(startTime, endTime);
         console.log('Dirty data cleanup task finished');
       } catch (error) {
         console.log('Dirty data cleanup task failed');
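Note on the hunk above: the cleanup window is no longer hard-coded inside the task. The handler now reads `start` and `end` from the request body as hour offsets relative to now (defaulting to -2 and -360 * 24) and derives the actual time range from them. A minimal sketch of triggering the root-only endpoint with a custom window; the route path and the root-key header name are assumptions, as neither is visible in this hunk:

    // Sketch only: route path and header name are assumptions.
    async function triggerCleanup(baseUrl: string, rootKey: string) {
      // Clean data created between 30 days ago and 2 hours ago (offsets are hours from now).
      const res = await fetch(`${baseUrl}/api/admin/clearInvalidData`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json', rootkey: rootKey },
        body: JSON.stringify({ start: -2, end: -30 * 24 })
      });
      return res.json();
    }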
@@ -141,11 +141,18 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
     const collections = await connectionMongo.connection.db
       .listCollections({ name: 'team.members' })
       .toArray();

     if (collections.length > 0) {
       const sourceCol = connectionMongo.connection.db.collection('team.members');
+      const targetCol = connectionMongo.connection.db.collection('team_members');
+
-      await sourceCol.rename('team_members', { dropTarget: true });
-      console.log('success rename team.members -> team_members');
+      if ((await targetCol.countDocuments()) > 1) {
+        // more than just root
+        console.log('team_members is not empty, cannot automatically migrate buffer.tts to team_members, please migrate manually');
+      } else {
+        await sourceCol.rename('team_members', { dropTarget: true });
+        console.log('success rename team.members -> team_members');
+      }
     }
   } catch (error) {
     console.log('error: rename team.members -> team_members', error);
@@ -170,6 +177,27 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
     console.log('error: rename team.tags -> team_tags', error);
   }

+  try {
+    const collections = await connectionMongo.connection.db
+      .listCollections({ name: 'team.subscriptions' })
+      .toArray();
+    if (collections.length > 0) {
+      const sourceCol = connectionMongo.connection.db.collection('team.subscriptions');
+      const targetCol = connectionMongo.connection.db.collection('team_subscriptions');
+
+      if ((await targetCol.countDocuments()) > 0) {
+        console.log(
+          'team_subscriptions is not empty, cannot automatically migrate team.subscriptions to team_subscriptions, please migrate manually'
+        );
+      } else {
+        await sourceCol.rename('team_subscriptions', { dropTarget: true });
+        console.log('success rename team.subscriptions -> team_subscriptions');
+      }
+    }
+  } catch (error) {
+    console.log('error: rename team.subscriptions -> team_subscriptions', error);
+  }
+
   jsonRes(res, {
     message: 'success'
   });
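Both rename blocks above follow the same pattern: look up the dot-named collection, then rename it to the underscore-named collection only if the target is still (effectively) empty, otherwise ask for a manual migration. A hypothetical helper that captures the pattern; it is not part of this commit, and the names are illustrative:

    import type { Db } from 'mongodb';

    // Hypothetical helper (not in the commit): rename collection `from` to `to`
    // only when `to` does not already hold more than `allowedDocs` documents.
    async function safeRenameCollection(db: Db, from: string, to: string, allowedDocs = 0) {
      const exists = await db.listCollections({ name: from }).toArray();
      if (exists.length === 0) return;

      if ((await db.collection(to).countDocuments()) > allowedDocs) {
        console.log(`${to} already has data, migrate ${from} manually`);
        return;
      }
      await db.collection(from).rename(to, { dropTarget: true });
      console.log(`success rename ${from} -> ${to}`);
    }

With such a helper, the team.members block would be roughly safeRenameCollection(db, 'team.members', 'team_members', 1) (the 1 mirrors the "> 1" check that tolerates the root member), and the subscriptions block safeRenameCollection(db, 'team.subscriptions', 'team_subscriptions').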
@@ -28,7 +28,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)

 export const config = {
   api: {
-    sizeLimit: '10mb',
     bodyParser: {
       sizeLimit: '16mb'
     }
@@ -1,45 +1,31 @@
 import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import { connectToDatabase } from '@/service/mongo';
 import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
 import { getVectorModel } from '@fastgpt/service/core/ai/model';
-import type { DatasetListItemType } from '@fastgpt/global/core/dataset/type.d';
+import type { DatasetSimpleItemType } from '@fastgpt/global/core/dataset/type.d';
 import { mongoRPermission } from '@fastgpt/global/support/permission/utils';
 import { authUserRole } from '@fastgpt/service/support/permission/auth/user';
 import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
+import { NextAPI } from '@/service/middle/entry';

 /* get all dataset by teamId or tmbId */
-export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  try {
-    await connectToDatabase();
-    // credential check
-    const { teamId, tmbId, teamOwner, role } = await authUserRole({ req, authToken: true });
+async function handler(
+  req: NextApiRequest,
+  res: NextApiResponse<any>
+): Promise<DatasetSimpleItemType[]> {
+  // credential check
+  const { teamId, tmbId, teamOwner, role } = await authUserRole({ req, authToken: true });

-    const datasets = await MongoDataset.find({
-      ...mongoRPermission({ teamId, tmbId, role }),
-      type: { $ne: DatasetTypeEnum.folder }
-    }).lean();
+  const datasets = await MongoDataset.find({
+    ...mongoRPermission({ teamId, tmbId, role }),
+    type: { $ne: DatasetTypeEnum.folder }
+  }).lean();

-    const data = datasets.map((item) => ({
-      _id: item._id,
-      parentId: item.parentId,
-      avatar: item.avatar,
-      name: item.name,
-      intro: item.intro,
-      type: item.type,
-      permission: item.permission,
-      vectorModel: getVectorModel(item.vectorModel),
-      canWrite: String(item.tmbId) === tmbId,
-      isOwner: teamOwner || String(item.tmbId) === tmbId
-    }));
-
-    jsonRes<DatasetListItemType[]>(res, {
-      data
-    });
-  } catch (err) {
-    jsonRes(res, {
-      code: 500,
-      error: err
-    });
-  }
+  return datasets.map((item) => ({
+    _id: item._id,
+    avatar: item.avatar,
+    name: item.name,
+    vectorModel: getVectorModel(item.vectorModel)
+  }));
 }
+
+export default NextAPI(handler);
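The rewrite above drops the manual try/catch and the jsonRes calls: the handler now simply returns DatasetSimpleItemType[] and is exported through the NextAPI wrapper, which presumably takes care of database connection, response serialization, and error handling. A rough sketch of consuming the slimmed-down response from the client; the route path and the { data } envelope are assumptions, not shown in this diff:

    import type { DatasetSimpleItemType } from '@fastgpt/global/core/dataset/type.d';

    // Sketch only: route path and response envelope are assumptions.
    async function fetchSimpleDatasets(): Promise<DatasetSimpleItemType[]> {
      const res = await fetch('/api/core/dataset/allDataset');
      const { data } = await res.json();
      // Each item now carries only _id, avatar, name and the resolved vectorModel.
      return data as DatasetSimpleItemType[];
    }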
@@ -9,7 +9,7 @@ import { getVectorModel } from '@fastgpt/service/core/ai/model';
 import { NextAPI } from '@/service/middle/entry';

 async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  const { parentId, type } = req.query as { parentId?: string; type?: `${DatasetTypeEnum}` };
+  const { parentId, type } = req.query as { parentId?: string; type?: DatasetTypeEnum };
   // credential check
   const { teamId, tmbId, teamOwner, role, canWrite } = await authUserRole({
     req,
@@ -8,8 +8,18 @@ import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
 export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
   try {
     await connectToDatabase();
-    const { id, parentId, name, avatar, intro, permission, agentModel, websiteConfig, status } =
-      req.body as DatasetUpdateBody;
+    const {
+      id,
+      parentId,
+      name,
+      avatar,
+      intro,
+      permission,
+      agentModel,
+      websiteConfig,
+      externalReadUrl,
+      status
+    } = req.body as DatasetUpdateBody;

     if (!id) {
       throw new Error('Missing parameters');
@@ -33,7 +43,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
         ...(agentModel && { agentModel: agentModel.model }),
         ...(websiteConfig && { websiteConfig }),
         ...(status && { status }),
-        ...(intro && { intro })
+        ...(intro && { intro }),
+        ...(externalReadUrl && { externalReadUrl })
       }
     );
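The two hunks above let a dataset carry an optional externalReadUrl: it is accepted in the update body and written only when provided, like the other optional fields. An illustrative request that sets it; the route path, HTTP method, and URL value are assumptions:

    // Illustrative only: route path, method, and URL are assumptions; only `id` is required.
    async function setExternalReadUrl(datasetId: string) {
      await fetch('/api/core/dataset/update', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
          id: datasetId,
          externalReadUrl: 'https://example.com/files/read'
        })
      });
    }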
@@ -9,7 +9,7 @@ import { PluginListItemType } from '@fastgpt/global/core/plugin/controller';
 export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
   try {
     await connectToDatabase();
-    const { parentId, type } = req.query as { parentId?: string; type?: `${DatasetTypeEnum}` };
+    const { parentId, type } = req.query as { parentId?: string; type?: DatasetTypeEnum };

     const { teamId } = await authCert({ req, authToken: true });

@@ -82,7 +82,7 @@ export default NextAPI(handler);
 export const config = {
   api: {
     bodyParser: {
-      sizeLimit: '10mb'
+      sizeLimit: '20mb'
     },
     responseLimit: '20mb'
   }
@@ -43,6 +43,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<

 export const config = {
   api: {
+    bodyParser: {
+      sizeLimit: '16mb'
+    },
     responseLimit: '16mb'
   }
 };
@@ -523,6 +523,9 @@ const authHeaderRequest = async ({

 export const config = {
   api: {
+    bodyParser: {
+      sizeLimit: '20mb'
+    },
     responseLimit: '20mb'
   }
 };