Mirror of https://github.com/labring/FastGPT.git (synced 2025-07-30 02:12:38 +00:00)

V4.6.7-production (#759)
@@ -271,28 +271,32 @@ const UserInfo = () => {
)}
</Flex>
</Box>
<Box mt={6} whiteSpace={'nowrap'} w={['85%', '300px']}>
<Flex alignItems={'center'}>
<Box flex={'1 0 0'} fontSize={'md'}>
{t('support.user.team.Dataset usage')}: {datasetUsageMap.usedSize}/
{datasetSub.maxSize}
{feConfigs?.show_pay && (
<Box mt={6} whiteSpace={'nowrap'} w={['85%', '300px']}>
<Flex alignItems={'center'}>
<Box flex={'1 0 0'} fontSize={'md'}>
{t('support.user.team.Dataset usage')}: {datasetUsageMap.usedSize}/
{datasetSub.maxSize}
</Box>
{userInfo?.team?.canWrite && (
<Button size={'sm'} onClick={onOpenSubDatasetModal}>
{t('support.wallet.Buy more')}
</Button>
)}
</Flex>
<Box mt={1}>
<Progress
value={datasetUsageMap.value}
colorScheme={datasetUsageMap.colorScheme}
borderRadius={'md'}
isAnimated
hasStripe
borderWidth={'1px'}
borderColor={'borderColor.base'}
/>
</Box>
<Button size={'sm'} onClick={onOpenSubDatasetModal}>
{t('support.wallet.Buy more')}
</Button>
</Flex>
<Box mt={1}>
<Progress
value={datasetUsageMap.value}
colorScheme={datasetUsageMap.colorScheme}
borderRadius={'md'}
isAnimated
hasStripe
borderWidth={'1px'}
borderColor={'borderColor.base'}
/>
</Box>
</Box>
)}
</>
)}
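The JSX above reads precomputed fields from datasetUsageMap (usedSize, value, colorScheme). A minimal sketch of how such a map could be derived; the function name, thresholds, and color choices are assumptions for illustration, not taken from this diff:

// Hypothetical helper, assumed shape only: derive display values from raw sizes.
function getDatasetUsageMap(usedSize: number, maxSize: number) {
  const value = maxSize > 0 ? Math.min(100, Math.round((usedSize / maxSize) * 100)) : 0;
  return {
    usedSize,
    value, // percentage fed to <Progress value={...} />
    colorScheme: value < 60 ? 'green' : value < 90 ? 'yellow' : 'red'
  };
}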
@@ -1,85 +0,0 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { uploadFile } from '@fastgpt/service/common/file/gridfs/controller';
import { getUploadModel } from '@fastgpt/service/common/file/multer';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { FileCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api';
import { removeFilesByPaths } from '@fastgpt/service/common/file/utils';
import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
import { DatasetCollectionTypeEnum } from '@fastgpt/global/core/dataset/constants';

/**
 * Creates the multer uploader
 */
const upload = getUploadModel({
  maxSize: 500 * 1024 * 1024
});

export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
  let filePaths: string[] = [];

  const { datasetId } = req.query as { datasetId: string };

  try {
    await connectToDatabase();

    const { teamId, tmbId } = await authDataset({
      req,
      authToken: true,
      authApiKey: true,
      per: 'w',
      datasetId
    });

    const { file, bucketName, data } = await upload.doUpload<FileCreateDatasetCollectionParams>(
      req,
      res
    );
    filePaths = [file.path];

    if (!file || !bucketName) {
      throw new Error('file is empty');
    }

    const { fileMetadata, collectionMetadata, ...collectionData } = data;

    // upload file and create collection
    const fileId = await uploadFile({
      teamId,
      tmbId,
      bucketName,
      path: file.path,
      filename: file.originalname,
      contentType: file.mimetype,
      metadata: fileMetadata
    });

    // create collection
    const collectionId = await createOneCollection({
      ...collectionData,
      metadata: collectionMetadata,
      teamId,
      tmbId,
      type: DatasetCollectionTypeEnum.file,
      fileId
    });

    jsonRes(res, {
      data: collectionId
    });
  } catch (error) {
    jsonRes(res, {
      code: 500,
      error
    });
  }

  removeFilesByPaths(filePaths);
}

export const config = {
  api: {
    bodyParser: false
  }
};
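For context, a hedged sketch of how a client might have called a file-upload route like the one removed above. The endpoint path and multipart field names are assumptions for illustration; only the general shape (a binary file plus a JSON payload, with datasetId in the query) is implied by the handler.

// Hypothetical browser-side call; the path and field names are not taken from this diff.
async function uploadDatasetFile(datasetId: string, file: File) {
  const formData = new FormData();
  formData.append('file', file); // the binary handled by the multer uploader
  formData.append(
    'data',
    JSON.stringify({ datasetId, name: file.name }) // parsed into FileCreateDatasetCollectionParams
  );

  const response = await fetch(`/api/some/upload/route?datasetId=${datasetId}`, {
    method: 'POST',
    body: formData
  });
  return response.json(); // { data: collectionId } on success
}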
@@ -1,91 +0,0 @@
/*
  Create one dataset collection
*/
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { LinkCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api.d';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
import {
  TrainingModeEnum,
  DatasetCollectionTypeEnum
} from '@fastgpt/global/core/dataset/constants';
import { checkDatasetLimit } from '@fastgpt/service/support/permission/limit/dataset';
import { predictDataLimitLength } from '@fastgpt/global/core/dataset/utils';
import { createTrainingBill } from '@fastgpt/service/support/wallet/bill/controller';
import { BillSourceEnum } from '@fastgpt/global/support/wallet/bill/constants';
import { getQAModel, getVectorModel } from '@/service/core/ai/model';
import { reloadCollectionChunks } from '@fastgpt/service/core/dataset/collection/utils';
import { startQueue } from '@/service/utils/tools';

export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
  try {
    await connectToDatabase();
    const {
      link,
      trainingType = TrainingModeEnum.chunk,
      chunkSize = 512,
      chunkSplitter,
      qaPrompt,
      ...body
    } = req.body as LinkCreateDatasetCollectionParams;

    const { teamId, tmbId, dataset } = await authDataset({
      req,
      authToken: true,
      authApiKey: true,
      datasetId: body.datasetId,
      per: 'w'
    });

    // 1. check dataset limit
    await checkDatasetLimit({
      teamId,
      freeSize: global.feConfigs?.subscription?.datasetStoreFreeSize,
      insertLen: predictDataLimitLength(trainingType, new Array(10))
    });

    // 2. create collection
    const collectionId = await createOneCollection({
      ...body,
      name: link,
      teamId,
      tmbId,
      type: DatasetCollectionTypeEnum.link,

      trainingType,
      chunkSize,
      chunkSplitter,
      qaPrompt,

      rawLink: link
    });

    // 3. create bill and start sync
    const { billId } = await createTrainingBill({
      teamId,
      tmbId,
      appName: 'core.dataset.collection.Sync Collection',
      billSource: BillSourceEnum.training,
      vectorModel: getVectorModel(dataset.vectorModel).name,
      agentModel: getQAModel(dataset.agentModel).name
    });
    await reloadCollectionChunks({
      collectionId,
      tmbId,
      billId
    });

    startQueue();

    jsonRes(res, {
      data: { collectionId }
    });
  } catch (err) {
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
}
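A hedged sketch of the JSON body a link-collection route like the one above would accept. The endpoint path is an assumption; the field names and defaults (trainingType, chunkSize) come from the handler's destructuring.

// Hypothetical client call; only the body fields mirror the handler above.
async function createLinkCollection() {
  const response = await fetch('/api/some/link-collection/route', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      datasetId: '65f0c0ffee0000000000cafe', // required; checked by authDataset with per: 'w'
      link: 'https://example.com/docs/page', // stored as the collection name and rawLink
      trainingType: 'chunk', // assumed string value of TrainingModeEnum.chunk (the default)
      chunkSize: 512 // default chunk length
    })
  });
  return response.json(); // { data: { collectionId } }
}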
@@ -1,117 +0,0 @@
/*
  Create one dataset collection
*/
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { TextCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api.d';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
import {
  TrainingModeEnum,
  DatasetCollectionTypeEnum
} from '@fastgpt/global/core/dataset/constants';
import { splitText2Chunks } from '@fastgpt/global/common/string/textSplitter';
import { checkDatasetLimit } from '@fastgpt/service/support/permission/limit/dataset';
import { predictDataLimitLength } from '@fastgpt/global/core/dataset/utils';
import { pushDataToTrainingQueue } from '@/service/core/dataset/data/controller';
import { hashStr } from '@fastgpt/global/common/string/tools';
import { createTrainingBill } from '@fastgpt/service/support/wallet/bill/controller';
import { BillSourceEnum } from '@fastgpt/global/support/wallet/bill/constants';
import { getQAModel, getVectorModel } from '@/service/core/ai/model';

export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
  try {
    await connectToDatabase();
    const {
      name,
      text,
      trainingType = TrainingModeEnum.chunk,
      chunkSize = 512,
      chunkSplitter,
      qaPrompt,
      ...body
    } = req.body as TextCreateDatasetCollectionParams;

    const { teamId, tmbId, dataset } = await authDataset({
      req,
      authToken: true,
      authApiKey: true,
      datasetId: body.datasetId,
      per: 'w'
    });

    // 1. split text to chunks
    const { chunks } = splitText2Chunks({
      text,
      chunkLen: chunkSize,
      overlapRatio: trainingType === TrainingModeEnum.chunk ? 0.2 : 0,
      customReg: chunkSplitter ? [chunkSplitter] : []
    });

    // 2. check dataset limit
    await checkDatasetLimit({
      teamId,
      freeSize: global.feConfigs?.subscription?.datasetStoreFreeSize,
      insertLen: predictDataLimitLength(trainingType, chunks)
    });

    // 3. create collection and training bill
    const [collectionId, { billId }] = await Promise.all([
      createOneCollection({
        ...body,
        teamId,
        tmbId,
        type: DatasetCollectionTypeEnum.virtual,

        name,
        trainingType,
        chunkSize,
        chunkSplitter,
        qaPrompt,

        hashRawText: hashStr(text),
        rawTextLength: text.length
      }),
      createTrainingBill({
        teamId,
        tmbId,
        appName: name,
        billSource: BillSourceEnum.training,
        vectorModel: getVectorModel(dataset.vectorModel)?.name,
        agentModel: getQAModel(dataset.agentModel)?.name
      })
    ]);

    // 4. push chunks to training queue
    const insertResults = await pushDataToTrainingQueue({
      teamId,
      tmbId,
      collectionId,
      trainingMode: trainingType,
      prompt: qaPrompt,
      billId,
      data: chunks.map((text, index) => ({
        q: text,
        chunkIndex: index
      }))
    });

    jsonRes(res, {
      data: { collectionId, results: insertResults }
    });
  } catch (err) {
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
}

export const config = {
  api: {
    bodyParser: {
      sizeLimit: '10mb'
    }
  }
};
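The route above splits the raw text before anything is stored, using chunkLen and an overlapRatio of 0.2 in chunk mode. A simplified, character-based sketch of what overlapping chunking means; this is an illustration only, not FastGPT's splitText2Chunks, which also honors custom separators and sentence boundaries.

// Illustrative only: fixed-size character chunks with proportional overlap.
function naiveSplitToChunks(text: string, chunkLen = 512, overlapRatio = 0.2): string[] {
  const step = Math.max(1, Math.floor(chunkLen * (1 - overlapRatio))); // advance by 80% of chunkLen
  const chunks: string[] = [];
  for (let start = 0; start < text.length; start += step) {
    chunks.push(text.slice(start, start + chunkLen));
    if (start + chunkLen >= text.length) break; // last chunk reached the end
  }
  return chunks;
}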
@@ -6,6 +6,7 @@ import type { CreateDatasetParams } from '@/global/core/dataset/api.d';
import { createDefaultCollection } from '@fastgpt/service/core/dataset/collection/controller';
import { authUserNotVisitor } from '@fastgpt/service/support/permission/auth/user';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { getQAModel, getVectorModel } from '@/service/core/ai/model';

export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
  try {
@@ -13,18 +14,18 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
    const {
      parentId,
      name,
      type,
      type = DatasetTypeEnum.dataset,
      avatar,
      vectorModel = global.vectorModels[0].model,
      agentModel = global.qaModels[0].model
    } = req.body as CreateDatasetParams;

    // auth
    const { teamId, tmbId } = await authUserNotVisitor({ req, authToken: true });
    const { teamId, tmbId } = await authUserNotVisitor({ req, authToken: true, authApiKey: true });

    // check model valid
    const vectorModelStore = global.vectorModels.find((item) => item.model === vectorModel);
    const agentModelStore = global.qaModels.find((item) => item.model === agentModel);
    const vectorModelStore = getVectorModel(vectorModel);
    const agentModelStore = getQAModel(agentModel);
    if (!vectorModelStore || !agentModelStore) {
      throw new Error('vectorModel or qaModel is invalid');
    }

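The hunk above replaces the inline global.vectorModels.find(...) lookups with the getVectorModel / getQAModel helpers. A minimal sketch of what such a helper could look like, assuming it only resolves a model id against the configured list; the fallback behavior here is an assumption, not confirmed by the diff, and the real implementation lives in '@/service/core/ai/model'.

// Hypothetical sketch; types and fallback choice are assumptions for illustration.
type VectorModelItemSketch = { model: string; name: string };
declare const vectorModels: VectorModelItemSketch[]; // stands in for global.vectorModels

function getVectorModelSketch(model?: string) {
  // resolve the requested id, falling back to the first configured model (assumption)
  return vectorModels.find((item) => item.model === model) ?? vectorModels[0];
}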
@@ -18,7 +18,13 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
    }

    // auth owner
    const { teamId } = await authDataset({ req, authToken: true, datasetId, per: 'owner' });
    const { teamId } = await authDataset({
      req,
      authToken: true,
      authApiKey: true,
      datasetId,
      per: 'owner'
    });

    const datasets = await findDatasetAndAllChildren({
      teamId,
@@ -1,64 +0,0 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { request } from '@fastgpt/service/common/api/plusRequest';
import type { Method } from 'axios';
import { setCookie } from '@fastgpt/service/support/permission/controller';
import { connectToDatabase } from '@/service/mongo';

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    await connectToDatabase();

    const method = (req.method || 'POST') as Method;
    const { path = [], ...query } = req.query as any;

    const url = `/${path?.join('/')}?${new URLSearchParams(query).toString()}`;

    if (!url) {
      throw new Error('url is empty');
    }

    const data = req.body || query;

    const repose = await request(
      url,
      data,
      {
        headers: {
          ...req.headers,
          // @ts-ignore
          rootkey: undefined
        }
      },
      method
    );

    /* special response */
    // response cookie
    if (repose?.cookie) {
      setCookie(res, repose.cookie);

      return jsonRes(res, {
        data: repose?.cookie
      });
    }

    jsonRes(res, {
      data: repose
    });
  } catch (error) {
    jsonRes(res, {
      code: 500,
      error
    });
  }
}

export const config = {
  api: {
    bodyParser: {
      sizeLimit: '10mb'
    },
    responseLimit: '10mb'
  }
};
projects/app/src/pages/api/proApi/[...path].ts (new file, 56 lines)
@@ -0,0 +1,56 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { request } from 'http';
import { FastGPTProUrl } from '@fastgpt/service/common/system/constants';
import url from 'url';

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    await connectToDatabase();
    const { path = [], ...query } = req.query as any;
    const requestPath = `/api/${path?.join('/')}?${new URLSearchParams(query).toString()}`;

    if (!requestPath) {
      throw new Error('url is empty');
    }

    const parsedUrl = url.parse(FastGPTProUrl);

    delete req.headers?.rootkey;

    const requestResult = request({
      protocol: parsedUrl.protocol,
      hostname: parsedUrl.hostname,
      port: parsedUrl.port,
      path: requestPath,
      method: req.method,
      headers: req.headers
    });
    req.pipe(requestResult);

    requestResult.on('response', (response) => {
      Object.keys(response.headers).forEach((key) => {
        // @ts-ignore
        res.setHeader(key, response.headers[key]);
      });
      response.statusCode && res.writeHead(response.statusCode);
      response.pipe(res);
    });
    requestResult.on('error', (e) => {
      res.send(e);
      res.end();
    });
  } catch (error) {
    jsonRes(res, {
      code: 500,
      error
    });
  }
}

export const config = {
  api: {
    bodyParser: false
  }
};
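The new handler pipes the incoming request body straight to the upstream service at FastGPTProUrl and streams the response back, so nothing is buffered in the Next.js process (bodyParser is disabled for the same reason). A minimal sketch of the path rewrite it performs; the helper name is hypothetical and only mirrors the requestPath template in the handler above.

// Hypothetical helper name; logic mirrors the requestPath template above.
function buildProApiPath(path: string[], query: Record<string, string>): string {
  return `/api/${path.join('/')}?${new URLSearchParams(query).toString()}`;
}

// e.g. a request to /api/proApi/support/wallet/bill?pageNum=1 is forwarded to
// `${FastGPTProUrl}/api/support/wallet/bill?pageNum=1`.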
@@ -87,7 +87,7 @@ const Detail = ({ datasetId, currentTab }: { datasetId: string; currentTab: `${T
    onError(err: any) {
      router.replace(`/dataset/list`);
      toast({
        title: getErrText(err, t('common.Load Failed')),
        title: t(getErrText(err, t('common.Load Failed'))),
        status: 'error'
      });
    }
@@ -46,13 +46,15 @@ import { PermissionTypeEnum } from '@fastgpt/global/support/permission/constant'
import { DatasetItemType } from '@fastgpt/global/core/dataset/type';
import ParentPaths from '@/components/common/ParentPaths';
import DatasetTypeTag from '@/components/core/dataset/DatasetTypeTag';
import { useToast } from '@/web/common/hooks/useToast';
import { getErrText } from '@fastgpt/global/common/error/utils';

const CreateModal = dynamic(() => import('./component/CreateModal'), { ssr: false });
const MoveModal = dynamic(() => import('./component/MoveModal'), { ssr: false });

const Kb = () => {
  const { t } = useTranslation();
  const theme = useTheme();
  const { toast } = useToast();
  const router = useRouter();
  const { parentId } = router.query as { parentId: string };
  const { setLoading } = useSystemStore();
@@ -115,9 +117,20 @@ const Kb = () => {
    errorToast: t('dataset.Export Dataset Limit Error')
  });

  const { data, refetch, isFetching } = useQuery(['loadDataset', parentId], () => {
    return Promise.all([loadDatasets(parentId), getDatasetPaths(parentId)]);
  });
  const { data, refetch, isFetching } = useQuery(
    ['loadDataset', parentId],
    () => {
      return Promise.all([loadDatasets(parentId), getDatasetPaths(parentId)]);
    },
    {
      onError(err) {
        toast({
          status: 'error',
          title: t(getErrText(err))
        });
      }
    }
  );

  const paths = data?.[1] || [];
@@ -106,9 +106,9 @@ const provider = ({ code, state, error }: { code: string; state: string; error?:
export async function getServerSideProps(content: any) {
  return {
    props: {
      code: content?.query?.code,
      state: content?.query?.state,
      error: content?.query?.error,
      code: content?.query?.code || '',
      state: content?.query?.state || '',
      error: content?.query?.error || '',
      ...(await serviceSideProps(content))
    }
  };