Mirror of https://github.com/labring/FastGPT.git (synced 2025-07-23 21:13:50 +00:00)
@@ -1,10 +1,7 @@
 import type { NextApiRequest, NextApiResponse } from 'next';
 import { jsonRes } from '@fastgpt/service/common/response';
 import { connectToDatabase } from '@/service/mongo';
-import {
-  delFileByFileIdList,
-  readFileContentFromMongo
-} from '@fastgpt/service/common/file/gridfs/controller';
+import { readFileContentFromMongo } from '@fastgpt/service/common/file/gridfs/controller';
 import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
 import { FileIdCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api';
 import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
@@ -24,6 +21,7 @@ import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants'
 import { getLLMModel, getVectorModel } from '@fastgpt/service/core/ai/model';
 import { hashStr } from '@fastgpt/global/common/string/tools';
 import { startTrainingQueue } from '@/service/core/dataset/training/utils';
+import { MongoRawTextBuffer } from '@fastgpt/service/common/buffer/rawText/schema';
 
 export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
   const {
@@ -139,6 +137,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
     return collectionId;
   });
 
+  // remove buffer
+  await MongoRawTextBuffer.deleteOne({ sourceId: fileId });
+
   startTrainingQueue(true);
 
   jsonRes(res);
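
The three hunks above come from the same route (creating a dataset collection from an uploaded file): it now imports `MongoRawTextBuffer` and, once the collection has been created and training queued, deletes the buffered raw text for that file. A plausible reading is that the buffer acts as a read-through cache keyed by `sourceId`, so repeated reads of the same file skip re-parsing from GridFS. The sketch below illustrates that lifecycle; the schema shape and the helper names `getRawText`/`cleanupBuffer` are assumptions for illustration, not FastGPT's actual code.

```ts
// A hedged sketch of the raw-text buffer lifecycle implied by the hunks above.
// The schema shape and helper names are assumptions, not FastGPT's actual code.
import mongoose, { Schema } from 'mongoose';

const RawTextBufferSchema = new Schema({
  sourceId: { type: String, required: true, index: true },
  rawText: { type: String, default: '' }
});
const MongoRawTextBuffer = mongoose.model('rawTextBuffer', RawTextBufferSchema);

// Read-through cache: reuse the parsed text when a buffer entry exists,
// otherwise parse the file (e.g. from GridFS) and store the result.
async function getRawText(fileId: string, parse: () => Promise<string>): Promise<string> {
  const cached = await MongoRawTextBuffer.findOne({ sourceId: fileId }).lean();
  if (cached) return cached.rawText;

  const rawText = await parse();
  await MongoRawTextBuffer.create({ sourceId: fileId, rawText });
  return rawText;
}

// Once the collection exists and training is queued, the cached text has
// served its purpose, so the route drops it, mirroring the deleteOne above.
async function cleanupBuffer(fileId: string): Promise<void> {
  await MongoRawTextBuffer.deleteOne({ sourceId: fileId });
}
```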
@@ -1,34 +1,24 @@
|
|||||||
import type { NextApiRequest, NextApiResponse } from 'next';
|
import type { NextApiRequest, NextApiResponse } from 'next';
|
||||||
import { jsonRes } from '@fastgpt/service/common/response';
|
|
||||||
import { connectToDatabase } from '@/service/mongo';
|
|
||||||
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
|
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
|
||||||
import { checkExportDatasetLimit } from '@fastgpt/service/support/user/utils';
|
import { checkExportDatasetLimit } from '@fastgpt/service/support/user/utils';
|
||||||
|
import { NextAPI } from '@/service/middle/entry';
|
||||||
|
|
||||||
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
|
async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
|
||||||
try {
|
const { datasetId } = req.query as {
|
||||||
await connectToDatabase();
|
datasetId: string;
|
||||||
const { datasetId } = req.query as {
|
};
|
||||||
datasetId: string;
|
|
||||||
};
|
|
||||||
|
|
||||||
if (!datasetId) {
|
if (!datasetId) {
|
||||||
throw new Error('datasetId is required');
|
throw new Error('datasetId is required');
|
||||||
}
|
|
||||||
|
|
||||||
// 凭证校验
|
|
||||||
const { teamId } = await authDataset({ req, authToken: true, datasetId, per: 'w' });
|
|
||||||
|
|
||||||
await checkExportDatasetLimit({
|
|
||||||
teamId,
|
|
||||||
limitMinutes: global.feConfigs?.limit?.exportDatasetLimitMinutes
|
|
||||||
});
|
|
||||||
|
|
||||||
jsonRes(res);
|
|
||||||
} catch (err) {
|
|
||||||
res.status(500);
|
|
||||||
jsonRes(res, {
|
|
||||||
code: 500,
|
|
||||||
error: err
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// 凭证校验
|
||||||
|
const { teamId } = await authDataset({ req, authToken: true, datasetId, per: 'w' });
|
||||||
|
|
||||||
|
await checkExportDatasetLimit({
|
||||||
|
teamId,
|
||||||
|
limitMinutes: global.feConfigs?.limit?.exportDatasetLimitMinutes
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export default NextAPI(handler);
|
||||||
|
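
This rewrite moves the route onto the `NextAPI` wrapper: the per-route `try/catch`, `connectToDatabase()`, and final `jsonRes(res)` all disappear, and the handler is reduced to validating input and doing its work. A minimal sketch of what a route looks like under that contract; the `name` parameter and response payload are invented for illustration.

```ts
// What a route looks like under the NextAPI contract sketched by this diff:
// no try/catch, no jsonRes, no manual DB connect. The `name` parameter and
// response payload here are invented for illustration.
import type { NextApiRequest, NextApiResponse } from 'next';
import { NextAPI } from '@/service/middle/entry';

async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
  const { name } = req.query as { name: string };

  // Thrown errors are caught by the wrapper and turned into error responses.
  if (!name) {
    throw new Error('name is required');
  }

  // The returned value is what the wrapper serializes as { code: 200, data }.
  return { greeting: `hello ${name}` };
}

export default NextAPI(handler);
```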
@@ -13,7 +13,8 @@ export const NextAPI = (...args: NextApiHandler[]): NextApiHandler => {
       response = await handler(req, res);
     }
 
-    if (!res.writableFinished) {
+    const contentType = res.getHeader('Content-Type');
+    if ((!contentType || contentType === 'application/json') && !res.writableFinished) {
       return jsonRes(res, {
         code: 200,
         data: response
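
Putting the pieces together, below is a minimal reconstruction of the wrapper this last hunk patches: handlers run in order, the last return value is captured, and the new guard skips auto-serialization when a handler has already set a non-JSON `Content-Type` (e.g. a CSV export stream) or finished writing the response itself. The loop and catch block are assumptions based on the visible context lines; `jsonRes` here is a simplified stand-in for `@fastgpt/service/common/response`.

```ts
// Minimal reconstruction of the wrapper the hunk above patches. The loop and
// catch block are assumptions based on the visible context lines; jsonRes is
// a simplified stand-in for '@fastgpt/service/common/response'.
import type { NextApiHandler, NextApiRequest, NextApiResponse } from 'next';

function jsonRes(res: NextApiResponse, body?: { code?: number; data?: unknown; error?: unknown }) {
  const code = body?.code ?? 200;
  const message = body?.error instanceof Error ? body.error.message : undefined;
  res.status(code).json({ code, message, data: body?.data ?? null });
}

export const NextAPI = (...args: NextApiHandler[]): NextApiHandler => {
  return async (req: NextApiRequest, res: NextApiResponse) => {
    try {
      let response: unknown = null;
      // Run each handler in order; the last return value wins.
      for (const handler of args) {
        response = await handler(req, res);
      }

      // Only auto-serialize plain JSON replies. A handler that streams a file
      // sets its own Content-Type (or finishes the response itself), and the
      // new guard leaves such responses untouched.
      const contentType = res.getHeader('Content-Type');
      if ((!contentType || contentType === 'application/json') && !res.writableFinished) {
        return jsonRes(res, { code: 200, data: response });
      }
    } catch (error) {
      return jsonRes(res, { code: 500, error });
    }
  };
};
```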