4.6.7: first PR (#726)

This commit is contained in:
Archer
2024-01-10 23:35:04 +08:00
committed by GitHub
parent 414b693303
commit 006ad17c6a
186 changed files with 2996 additions and 1838 deletions

View File

@@ -1,4 +1,4 @@
import React, { useCallback, useRef } from 'react';
import React, { useCallback, useMemo, useRef } from 'react';
import {
Box,
Flex,
@@ -8,7 +8,8 @@ import {
Divider,
Select,
Input,
Link
Link,
Progress
} from '@chakra-ui/react';
import { useForm } from 'react-hook-form';
import { UserUpdateParams } from '@/types/user';
@@ -22,7 +23,6 @@ import { compressImgFileAndUpload } from '@/web/common/file/controller';
import { feConfigs, systemVersion } from '@/web/common/system/staticData';
import { useTranslation } from 'next-i18next';
import { timezoneList } from '@fastgpt/global/common/time/timezone';
import Loading from '@/components/Loading';
import Avatar from '@/components/Avatar';
import MyIcon from '@fastgpt/web/components/common/Icon';
import MyTooltip from '@/components/MyTooltip';
@@ -32,20 +32,14 @@ import MySelect from '@/components/Select';
import { formatStorePrice2Read } from '@fastgpt/global/support/wallet/bill/tools';
import { putUpdateMemberName } from '@/web/support/user/team/api';
import { getDocPath } from '@/web/common/system/doc';
import { getTeamDatasetValidSub } from '@/web/support/wallet/sub/api';
import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants';
const TeamMenu = dynamic(() => import('@/components/support/user/team/TeamMenu'));
const PayModal = dynamic(() => import('./PayModal'), {
loading: () => <Loading fixed={false} />,
ssr: false
});
const UpdatePswModal = dynamic(() => import('./UpdatePswModal'), {
loading: () => <Loading fixed={false} />,
ssr: false
});
const OpenAIAccountModal = dynamic(() => import('./OpenAIAccountModal'), {
loading: () => <Loading fixed={false} />,
ssr: false
});
const PayModal = dynamic(() => import('./PayModal'));
const UpdatePswModal = dynamic(() => import('./UpdatePswModal'));
const OpenAIAccountModal = dynamic(() => import('./OpenAIAccountModal'));
const SubDatasetModal = dynamic(() => import('@/components/support/wallet/SubDatasetModal'));
const UserInfo = () => {
const theme = useTheme();
@@ -69,6 +63,11 @@ const UserInfo = () => {
onOpen: onOpenUpdatePsw
} = useDisclosure();
const { isOpen: isOpenOpenai, onClose: onCloseOpenai, onOpen: onOpenOpenai } = useDisclosure();
const {
isOpen: isOpenSubDatasetModal,
onClose: onCloseSubDatasetModal,
onOpen: onOpenSubDatasetModal
} = useDisclosure();
const { File, onOpen: onOpenSelectFile } = useSelectFile({
fileType: '.jpg,.png',
@@ -97,6 +96,7 @@ const UserInfo = () => {
if (!file || !userInfo) return;
try {
const src = await compressImgFileAndUpload({
type: MongoImageTypeEnum.userAvatar,
file,
maxW: 300,
maxH: 300
@@ -122,6 +122,27 @@ const UserInfo = () => {
}
});
const { data: datasetSub = { maxSize: 0, usedSize: 0 } } = useQuery(
['getTeamDatasetValidSub'],
getTeamDatasetValidSub
);
const datasetUsageMap = useMemo(() => {
const rate = datasetSub.usedSize / datasetSub.maxSize;
const colorScheme = (() => {
if (rate < 0.5) return 'green';
if (rate < 0.8) return 'yellow';
return 'red';
})();
return {
colorScheme,
value: rate * 100,
maxSize: datasetSub.maxSize,
usedSize: datasetSub.usedSize
};
}, [datasetSub.maxSize, datasetSub.usedSize]);
return (
<Box
display={['block', 'flex']}
@@ -233,21 +254,48 @@ const UserInfo = () => {
{t('user.Change')}
</Button>
</Flex>
<Box mt={6} whiteSpace={'nowrap'} w={['85%', '300px']}>
<Flex alignItems={'center'}>
<Box flex={'0 0 80px'} fontSize={'md'}>
{t('user.team.Balance')}:&nbsp;
{feConfigs.isPlus && (
<>
<Box mt={6} whiteSpace={'nowrap'} w={['85%', '300px']}>
<Flex alignItems={'center'}>
<Box flex={'0 0 80px'} fontSize={'md'}>
{t('user.team.Balance')}:&nbsp;
</Box>
<Box flex={1}>
<strong>{formatStorePrice2Read(userInfo?.team?.balance).toFixed(3)}</strong>
</Box>
{feConfigs?.show_pay && userInfo?.team?.canWrite && (
<Button size={['sm', 'md']} ml={5} onClick={onOpenPayModal}>
{t('user.Pay')}
</Button>
)}
</Flex>
</Box>
<Box flex={1}>
<strong>{formatStorePrice2Read(userInfo?.team?.balance).toFixed(3)}</strong>
<Box mt={6} whiteSpace={'nowrap'} w={['85%', '300px']}>
<Flex alignItems={'center'}>
<Box flex={'1 0 0'} fontSize={'md'}>
{t('support.user.team.Dataset usage')}:&nbsp;{datasetUsageMap.usedSize}/
{datasetSub.maxSize}
</Box>
<Button size={'sm'} onClick={onOpenSubDatasetModal}>
{t('support.wallet.Buy more')}
</Button>
</Flex>
<Box mt={1}>
<Progress
value={datasetUsageMap.value}
colorScheme={datasetUsageMap.colorScheme}
borderRadius={'md'}
isAnimated
hasStripe
borderWidth={'1px'}
borderColor={'borderColor.base'}
/>
</Box>
</Box>
{feConfigs?.show_pay && userInfo?.team?.canWrite && (
<Button size={['sm', 'md']} ml={5} onClick={onOpenPayModal}>
{t('user.Pay')}
</Button>
)}
</Flex>
</Box>
</>
)}
{feConfigs?.docUrl && (
<Link
href={getDocPath('/docs/intro')}
@@ -344,9 +392,10 @@ const UserInfo = () => {
onClose={onCloseOpenai}
/>
)}
{isOpenSubDatasetModal && <SubDatasetModal onClose={onCloseSubDatasetModal} />}
<File onSelect={onSelectFile} />
</Box>
);
};
export default UserInfo;
export default React.memo(UserInfo);

View File

@@ -46,12 +46,12 @@ const BillTable = () => {
}}
>
<Flex alignItems={'center'} justifyContent={'space-between'}>
<Box>{item.title}</Box>
<Box fontWeight={'bold'}>{item.title}</Box>
<Box ml={2} color={'myGray.500'}>
{formatTimeToChatTime(item.time)}
</Box>
</Flex>
<Box fontSize={'sm'} color={'myGray.600'}>
<Box fontSize={'sm'} color={'myGray.600'} whiteSpace={'pre-wrap'}>
{item.content}
</Box>
{!item.read && (

View File

@@ -3,7 +3,7 @@ import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { uploadFile } from '@fastgpt/service/common/file/gridfs/controller';
import { getUploadModel, removeFilesByPaths } from '@fastgpt/service/common/file/upload/multer';
import { getUploadModel } from '@fastgpt/service/common/file/multer';
/**
* Creates the multer uploader
@@ -16,12 +16,13 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
let filePaths: string[] = [];
try {
const { userId, teamId, tmbId } = await authCert({ req, authToken: true });
const { files, bucketName, metadata } = await upload.doUpload(req, res);
filePaths = files.map((file) => file.path);
await connectToDatabase();
const { userId, teamId, tmbId } = await authCert({ req, authToken: true });
if (!bucketName) {
throw new Error('bucketName is empty');
@@ -53,8 +54,6 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
error
});
}
removeFilesByPaths(filePaths);
}
export const config = {

View File

@@ -8,15 +8,13 @@ import { UploadImgProps } from '@fastgpt/global/common/file/api';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
const { base64Img, expiredTime, metadata, shareId } = req.body as UploadImgProps;
const { shareId, ...body } = req.body as UploadImgProps;
const { teamId } = await authCertOrShareId({ req, shareId, authToken: true });
const data = await uploadMongoImg({
teamId,
base64Img,
expiredTime,
metadata
...body
});
jsonRes(res, { data });

View File

@@ -59,39 +59,44 @@ const defaultFeConfigs: FastGPTFeConfigsType = {
};
export async function getInitConfig() {
if (global.systemInitd) return;
global.systemInitd = true;
try {
if (global.feConfigs) return;
await connectToDatabase();
initGlobal();
await initSystemConfig();
await Promise.all([
initGlobal(),
initSystemConfig(),
getSimpleModeTemplates(),
getSystemVersion(),
getSystemPlugin()
]);
console.log({
simpleModeTemplates: global.simpleModeTemplates,
communityPlugins: global.communityPlugins
});
} catch (error) {
console.error('Load init config error', error);
global.systemInitd = false;
if (!global.feConfigs) {
exit(1);
}
}
await getSimpleModeTemplates();
}
getSystemVersion();
getSystemPlugin();
export function initGlobal() {
if (global.communityPlugins) return;
console.log({
feConfigs: global.feConfigs,
systemEnv: global.systemEnv,
chatModels: global.chatModels,
qaModels: global.qaModels,
cqModels: global.cqModels,
extractModels: global.extractModels,
qgModels: global.qgModels,
vectorModels: global.vectorModels,
reRankModels: global.reRankModels,
audioSpeechModels: global.audioSpeechModels,
whisperModel: global.whisperModel,
simpleModeTemplates: global.simpleModeTemplates,
communityPlugins: global.communityPlugins
});
global.communityPlugins = [];
global.simpleModeTemplates = [];
global.qaQueueLen = global.qaQueueLen ?? 0;
global.vectorQueueLen = global.vectorQueueLen ?? 0;
// init tikToken
getTikTokenEnc();
initHttpAgent();
}
export async function initSystemConfig() {
@@ -137,19 +142,24 @@ export async function initSystemConfig() {
global.reRankModels = config.reRankModels;
global.audioSpeechModels = config.audioSpeechModels;
global.whisperModel = config.whisperModel;
}
// One-time process-level initialization of in-memory global state.
// Runs before/alongside initSystemConfig (see getInitConfig); the model
// globals logged below are populated by initSystemConfig, so they may
// still be undefined here on first boot — TODO confirm intended order.
export function initGlobal() {
// start with empty registries; later loaders (getSystemPlugin,
// getSimpleModeTemplates) fill these in
global.communityPlugins = [];
global.simpleModeTemplates = [];
// preserve queue counters across hot reloads; only default to 0 when unset
global.qaQueueLen = global.qaQueueLen ?? 0;
global.vectorQueueLen = global.vectorQueueLen ?? 0;
// init tikToken
// warm up the tokenizer encoder so the first request doesn't pay the cost
getTikTokenEnc();
// configure shared HTTP agent (keep-alive / proxy) — presumably safe to
// call once per process; verify against its implementation
initHttpAgent();
// dump the effective configuration for operational debugging
console.log({
feConfigs: global.feConfigs,
systemEnv: global.systemEnv,
chatModels: global.chatModels,
qaModels: global.qaModels,
cqModels: global.cqModels,
extractModels: global.extractModels,
qgModels: global.qgModels,
vectorModels: global.vectorModels,
reRankModels: global.reRankModels,
audioSpeechModels: global.audioSpeechModels,
whisperModel: global.whisperModel
});
}
export function getSystemVersion() {
if (global.systemVersion) return;
try {
if (process.env.NODE_ENV === 'development') {
global.systemVersion = process.env.npm_package_version || '0.0.0';

View File

@@ -1,31 +0,0 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { initSystemConfig } from './getInitData';
/**
 * Admin-only endpoint: reload the system model configuration from storage
 * and log the resulting global model registries.
 *
 * Auth: requires root credentials (authRoot) — non-root callers throw and
 * fall into the catch below. NOTE(review): errors are only logged, and
 * jsonRes(res) is always sent afterwards, so the client receives a success
 * envelope even when refresh failed — confirm this is intentional.
 */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
// reject anyone who is not the root account
await authCert({ req, authRoot: true });
// re-read the system config into the process globals
await initSystemConfig();
console.log(`refresh config`);
// dump the refreshed registries for operational verification
console.log({
chatModels: global.chatModels,
qaModels: global.qaModels,
cqModels: global.cqModels,
extractModels: global.extractModels,
qgModels: global.qgModels,
vectorModels: global.vectorModels,
reRankModels: global.reRankModels,
audioSpeechModels: global.audioSpeechModels,
whisperModel: global.whisperModel,
feConfigs: global.feConfigs,
systemEnv: global.systemEnv
});
} catch (error) {
// swallow and log: response below still reports success (see note above)
console.log(error);
}
jsonRes(res);
}

View File

@@ -29,6 +29,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
await MongoChatItem.findOneAndUpdate(
{
chatId,
dataId: chatItemId
},
{

View File

@@ -0,0 +1,88 @@
/*
Create one dataset collection
*/
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { LinkCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api.d';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
import { TrainingModeEnum, DatasetCollectionTypeEnum } from '@fastgpt/global/core/dataset/constant';
import { checkDatasetLimit } from '@fastgpt/service/support/permission/limit/dataset';
import { predictDataLimitLength } from '@fastgpt/global/core/dataset/utils';
import { createTrainingBill } from '@fastgpt/service/support/wallet/bill/controller';
import { BillSourceEnum } from '@fastgpt/global/support/wallet/bill/constants';
import { getQAModel, getVectorModel } from '@/service/core/ai/model';
import { reloadCollectionChunks } from '@fastgpt/service/core/dataset/collection/utils';
import { startQueue } from '@/service/utils/tools';
/**
 * Create a dataset collection from an external link and kick off syncing.
 *
 * Flow: auth (write access on the dataset) → storage-limit check →
 * collection creation → training bill creation → chunk reload → start the
 * background queue. Responds with { collectionId } on success, or a 500
 * envelope with the raw error on failure.
 */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
  try {
    await connectToDatabase();

    const {
      link,
      trainingType = TrainingModeEnum.chunk,
      chunkSize = 512,
      chunkSplitter,
      qaPrompt,
      ...body
    } = req.body as LinkCreateDatasetCollectionParams;

    // Caller must hold write permission on the target dataset (token or API key).
    const auth = await authDataset({
      req,
      authToken: true,
      authApiKey: true,
      datasetId: body.datasetId,
      per: 'w'
    });
    const { teamId, tmbId, dataset } = auth;

    // 1. check dataset limit
    // Estimated insert length uses a fixed 10-chunk placeholder since the
    // link has not been fetched yet.
    await checkDatasetLimit({
      teamId,
      freeSize: global.feConfigs?.subscription?.datasetStoreFreeSize,
      insertLen: predictDataLimitLength(trainingType, new Array(10))
    });

    // 2. create collection
    const newCollectionId = await createOneCollection({
      ...body,
      name: link,
      teamId,
      tmbId,
      type: DatasetCollectionTypeEnum.link,
      trainingType,
      chunkSize,
      chunkSplitter,
      qaPrompt,
      rawLink: link
    });

    // 3. create bill and start sync
    const billResult = await createTrainingBill({
      teamId,
      tmbId,
      appName: 'core.dataset.collection.Sync Collection',
      billSource: BillSourceEnum.training,
      vectorModel: getVectorModel(dataset.vectorModel).name,
      agentModel: getQAModel(dataset.agentModel).name
    });
    await reloadCollectionChunks({
      collectionId: newCollectionId,
      tmbId,
      billId: billResult.billId
    });

    startQueue();

    jsonRes(res, {
      data: { collectionId: newCollectionId }
    });
  } catch (err) {
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
}

View File

@@ -0,0 +1,90 @@
/*
Create one dataset collection
*/
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { TextCreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api.d';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
import { TrainingModeEnum, DatasetCollectionTypeEnum } from '@fastgpt/global/core/dataset/constant';
import { splitText2Chunks } from '@fastgpt/global/common/string/textSplitter';
import { checkDatasetLimit } from '@fastgpt/service/support/permission/limit/dataset';
import { predictDataLimitLength } from '@fastgpt/global/core/dataset/utils';
import { pushDataToDatasetCollection } from '@/service/core/dataset/data/controller';
import { hashStr } from '@fastgpt/global/common/string/tools';
/**
 * Create a dataset collection from raw text.
 *
 * Flow: auth (write access on the dataset) → split the text into chunks →
 * storage-limit check → create a virtual collection → push the chunks onto
 * the training queue. Responds with { collectionId, results } on success,
 * or a 500 envelope with the raw error on failure.
 */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
  try {
    await connectToDatabase();

    const {
      text,
      trainingType = TrainingModeEnum.chunk,
      chunkSize = 512,
      chunkSplitter,
      qaPrompt,
      ...body
    } = req.body as TextCreateDatasetCollectionParams;

    // Caller must hold write permission on the target dataset (token or API key).
    const { teamId, tmbId } = await authDataset({
      req,
      authToken: true,
      authApiKey: true,
      datasetId: body.datasetId,
      per: 'w'
    });

    // 1. split text to chunks
    // Chunk mode keeps a 20% overlap between adjacent chunks; QA mode uses none.
    const splitResult = splitText2Chunks({
      text,
      chunkLen: chunkSize,
      overlapRatio: trainingType === TrainingModeEnum.chunk ? 0.2 : 0,
      customReg: chunkSplitter ? [chunkSplitter] : [],
      countTokens: false
    });
    const { chunks } = splitResult;

    // 2. check dataset limit
    await checkDatasetLimit({
      teamId,
      freeSize: global.feConfigs?.subscription?.datasetStoreFreeSize,
      insertLen: predictDataLimitLength(trainingType, chunks)
    });

    // 3. create collection
    // Hash of the raw text is stored so a later sync can detect unchanged input.
    const collectionId = await createOneCollection({
      ...body,
      teamId,
      tmbId,
      type: DatasetCollectionTypeEnum.virtual,
      trainingType,
      chunkSize,
      chunkSplitter,
      qaPrompt,
      hashRawText: hashStr(text),
      rawTextLength: text.length
    });

    // 4. push chunks to training queue
    const insertResults = await pushDataToDatasetCollection({
      teamId,
      tmbId,
      collectionId,
      trainingMode: trainingType,
      data: chunks.map((chunkText, chunkIndex) => ({
        q: chunkText,
        chunkIndex
      }))
    });

    jsonRes(res, {
      data: { collectionId, results: insertResults }
    });
  } catch (err) {
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
}

View File

@@ -5,7 +5,6 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { CreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api.d';
import { authUserNotVisitor } from '@fastgpt/service/support/permission/auth/user';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { createOneCollection } from '@fastgpt/service/core/dataset/collection/controller';
@@ -14,13 +13,12 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
await connectToDatabase();
const body = req.body as CreateDatasetCollectionParams;
// auth. not visitor and dataset is public
const { teamId, tmbId } = await authUserNotVisitor({ req, authToken: true });
await authDataset({
const { teamId, tmbId } = await authDataset({
req,
authToken: true,
authApiKey: true,
datasetId: body.datasetId,
per: 'r'
per: 'w'
});
jsonRes(res, {

View File

@@ -4,13 +4,12 @@ import { connectToDatabase } from '@/service/mongo';
import { findCollectionAndChild } from '@fastgpt/service/core/dataset/collection/utils';
import { delCollectionRelevantData } from '@fastgpt/service/core/dataset/data/controller';
import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
import { MongoDatasetCollection } from '@fastgpt/service/core/dataset/collection/schema';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
const { collectionId } = req.query as { collectionId: string };
const { id: collectionId } = req.query as { id: string };
if (!collectionId) {
throw new Error('CollectionIdId is required');
@@ -19,6 +18,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
await authDatasetCollection({
req,
authToken: true,
authApiKey: true,
collectionId,
per: 'w'
});

View File

@@ -22,6 +22,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
const { collection, canWrite } = await authDatasetCollection({
req,
authToken: true,
authApiKey: true,
collectionId: id,
per: 'r'
});

View File

@@ -11,7 +11,6 @@ import { DatasetCollectionTypeEnum } from '@fastgpt/global/core/dataset/constant
import { startQueue } from '@/service/utils/tools';
import { authDataset } from '@fastgpt/service/support/permission/auth/dataset';
import { DatasetDataCollectionName } from '@fastgpt/service/core/dataset/data/schema';
import { authUserRole } from '@fastgpt/service/support/permission/auth/user';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
@@ -27,12 +26,19 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
simple = false
} = req.body as GetDatasetCollectionsProps;
searchText = searchText?.replace(/'/g, '');
pageSize = Math.min(pageSize, 30);
// auth dataset and get my role
const { tmbId } = await authDataset({ req, authToken: true, datasetId, per: 'r' });
const { canWrite } = await authUserRole({ req, authToken: true });
const { teamId, tmbId, canWrite } = await authDataset({
req,
authToken: true,
authApiKey: true,
datasetId,
per: 'r'
});
const match = {
teamId: new Types.ObjectId(teamId),
datasetId: new Types.ObjectId(datasetId),
parentId: parentId ? new Types.ObjectId(parentId) : null,
...(selectFolder ? { type: DatasetCollectionTypeEnum.folder } : {}),
@@ -85,9 +91,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
}
}
},
{ $project: { _id: 1 } }
{ $count: 'count' }
],
as: 'trainings'
as: 'trainingCount'
}
},
// count collection total data
@@ -103,9 +109,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
}
}
},
{ $project: { _id: 1 } }
{ $count: 'count' }
],
as: 'datas'
as: 'dataCount'
}
},
{
@@ -117,10 +123,14 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
type: 1,
status: 1,
updateTime: 1,
dataAmount: { $size: '$datas' },
trainingAmount: { $size: '$trainings' },
fileId: 1,
rawLink: 1
rawLink: 1,
dataAmount: {
$ifNull: [{ $arrayElemAt: ['$dataCount.count', 0] }, 0]
},
trainingAmount: {
$ifNull: [{ $arrayElemAt: ['$trainingCount.count', 0] }, 0]
}
}
},
{
@@ -144,7 +154,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
);
if (data.find((item) => item.trainingAmount > 0)) {
startQueue(1);
startQueue();
}
// count collections

View File

@@ -38,7 +38,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
return Promise.reject(DatasetErrEnum.unLinkCollection);
}
const { rawText, isSameRawText } = await getCollectionAndRawText({
const { title, rawText, isSameRawText } = await getCollectionAndRawText({
collection
});
@@ -68,7 +68,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
tmbId: collection.tmbId,
parentId: collection.parentId,
datasetId: collection.datasetId._id,
name: collection.name,
name: title || collection.name,
type: collection.type,
trainingType: collection.trainingType,
chunkSize: collection.chunkSize,

View File

@@ -16,7 +16,13 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
}
// 凭证校验
await authDatasetCollection({ req, authToken: true, collectionId: id, per: 'w' });
await authDatasetCollection({
req,
authToken: true,
authApiKey: true,
collectionId: id,
per: 'w'
});
const updateFields: Record<string, any> = {
...(parentId !== undefined && { parentId: parentId || null }),

View File

@@ -16,12 +16,28 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
type,
avatar,
vectorModel = global.vectorModels[0].model,
agentModel
agentModel = global.qaModels[0].model
} = req.body as CreateDatasetParams;
// 凭证校验
// auth
const { teamId, tmbId } = await authUserNotVisitor({ req, authToken: true });
// check model valid
const vectorModelStore = global.vectorModels.find((item) => item.model === vectorModel);
const agentModelStore = global.qaModels.find((item) => item.model === agentModel);
if (!vectorModelStore || !agentModelStore) {
throw new Error('vectorModel or qaModel is invalid');
}
// check limit
const authCount = await MongoDataset.countDocuments({
teamId,
type: DatasetTypeEnum.dataset
});
if (authCount >= 50) {
throw new Error('每个团队上限 50 个知识库');
}
const { _id } = await MongoDataset.create({
name,
teamId,

View File

@@ -8,8 +8,8 @@ import { delDatasetDataByDataId } from '@fastgpt/service/core/dataset/data/contr
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
const { dataId } = req.query as {
dataId: string;
const { id: dataId } = req.query as {
id: string;
};
if (!dataId) {
@@ -17,9 +17,18 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
}
// 凭证校验
await authDatasetData({ req, authToken: true, dataId, per: 'w' });
const { datasetData } = await authDatasetData({
req,
authToken: true,
authApiKey: true,
dataId,
per: 'w'
});
await delDatasetDataByDataId(dataId);
await delDatasetDataByDataId({
collectionId: datasetData.collectionId,
mongoDataId: dataId
});
jsonRes(res, {
data: 'success'

View File

@@ -13,12 +13,18 @@ export type Response = {
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
const { dataId } = req.query as {
dataId: string;
const { id: dataId } = req.query as {
id: string;
};
// 凭证校验
const { datasetData } = await authDatasetData({ req, authToken: true, dataId, per: 'r' });
const { datasetData } = await authDatasetData({
req,
authToken: true,
authApiKey: true,
dataId,
per: 'r'
});
jsonRes(res, {
data: datasetData

View File

@@ -16,6 +16,7 @@ import { authTeamBalance } from '@/service/support/permission/auth/bill';
import { pushGenerateVectorBill } from '@/service/support/wallet/bill/push';
import { InsertOneDatasetDataProps } from '@/global/core/dataset/api';
import { simpleText } from '@fastgpt/global/common/string/tools';
import { checkDatasetLimit } from '@fastgpt/service/support/permission/limit/dataset';
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
@@ -39,6 +40,12 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
per: 'w'
});
await checkDatasetLimit({
teamId,
freeSize: global.feConfigs?.subscription?.datasetStoreFreeSize,
insertLen: 1
});
// auth collection and get dataset
const [
{

View File

@@ -17,8 +17,10 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
collectionId
} = req.body as GetDatasetDataListProps;
pageSize = Math.min(pageSize, 30);
// 凭证校验
await authDatasetCollection({ req, authToken: true, collectionId, per: 'r' });
await authDatasetCollection({ req, authToken: true, authApiKey: true, collectionId, per: 'r' });
searchText = searchText.replace(/'/g, '');
@@ -32,7 +34,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
};
const [data, total] = await Promise.all([
MongoDatasetData.find(match, '_id datasetId collectionId q a chunkIndex indexes')
MongoDatasetData.find(match, '_id datasetId collectionId q a chunkIndex')
.sort({ chunkIndex: 1, updateTime: -1 })
.skip((pageNum - 1) * pageSize)
.limit(pageSize)

View File

@@ -2,38 +2,30 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/schema';
import { withNextCors } from '@fastgpt/service/common/middle/cors';
import { TrainingModeEnum, TrainingTypeMap } from '@fastgpt/global/core/dataset/constant';
import { startQueue } from '@/service/utils/tools';
import { countPromptTokens } from '@fastgpt/global/common/string/tiktoken';
import type { PushDataResponse } from '@/global/core/api/datasetRes.d';
import type { PushDatasetDataProps } from '@/global/core/dataset/api.d';
import { PushDatasetDataChunkProps } from '@fastgpt/global/core/dataset/api';
import { getQAModel, getVectorModel } from '@/service/core/ai/model';
import { authDatasetCollection } from '@fastgpt/service/support/permission/auth/dataset';
import { getCollectionWithDataset } from '@fastgpt/service/core/dataset/controller';
import { simpleText } from '@fastgpt/global/common/string/tools';
import { checkDatasetLimit } from '@fastgpt/service/support/permission/limit/dataset';
import { predictDataLimitLength } from '@fastgpt/global/core/dataset/utils';
import { pushDataToDatasetCollection } from '@/service/core/dataset/data/controller';
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
const { collectionId, data, mode = TrainingModeEnum.chunk } = req.body as PushDatasetDataProps;
const { collectionId, data } = req.body as PushDatasetDataProps;
if (!collectionId || !Array.isArray(data)) {
throw new Error('collectionId or data is empty');
}
if (!TrainingTypeMap[mode]) {
throw new Error(`Mode is not ${Object.keys(TrainingTypeMap).join(', ')}`);
}
if (data.length > 200) {
throw new Error('Data is too long, max 200');
}
// 凭证校验
const { teamId, tmbId } = await authDatasetCollection({
const { teamId, tmbId, collection } = await authDatasetCollection({
req,
authToken: true,
authApiKey: true,
@@ -41,6 +33,13 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
per: 'w'
});
// auth dataset limit
await checkDatasetLimit({
teamId,
freeSize: global.feConfigs?.subscription?.datasetStoreFreeSize,
insertLen: predictDataLimitLength(collection.trainingType, data)
});
jsonRes<PushDataResponse>(res, {
data: await pushDataToDatasetCollection({
...req.body,
@@ -56,141 +55,6 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
}
});
export async function pushDataToDatasetCollection({
teamId,
tmbId,
collectionId,
data,
mode,
prompt,
billId
}: {
teamId: string;
tmbId: string;
} & PushDatasetDataProps): Promise<PushDataResponse> {
const { datasetId, model, maxToken, weight } = await checkModelValid({
mode,
collectionId
});
// format q and a, remove empty char
data.forEach((item) => {
item.q = simpleText(item.q);
item.a = simpleText(item.a);
item.indexes = item.indexes
?.map((index) => {
return {
...index,
text: simpleText(index.text)
};
})
.filter(Boolean);
});
// filter repeat or equal content
const set = new Set();
const filterResult: Record<string, PushDatasetDataChunkProps[]> = {
success: [],
overToken: [],
repeat: [],
error: []
};
data.forEach((item) => {
if (!item.q) {
filterResult.error.push(item);
return;
}
const text = item.q + item.a;
// count q token
const token = countPromptTokens(item.q);
if (token > maxToken) {
filterResult.overToken.push(item);
return;
}
if (set.has(text)) {
console.log('repeat', item);
filterResult.repeat.push(item);
} else {
filterResult.success.push(item);
set.add(text);
}
});
// 插入记录
const insertRes = await MongoDatasetTraining.insertMany(
filterResult.success.map((item, i) => ({
teamId,
tmbId,
datasetId,
collectionId,
billId,
mode,
prompt,
model,
q: item.q,
a: item.a,
chunkIndex: item.chunkIndex ?? i,
weight: weight ?? 0,
indexes: item.indexes
}))
);
insertRes.length > 0 && startQueue();
delete filterResult.success;
return {
insertLen: insertRes.length,
...filterResult
};
}
export async function checkModelValid({
mode,
collectionId
}: {
mode: `${TrainingModeEnum}`;
collectionId: string;
}) {
const {
datasetId: { _id: datasetId, vectorModel, agentModel }
} = await getCollectionWithDataset(collectionId);
if (mode === TrainingModeEnum.chunk) {
if (!collectionId) return Promise.reject(`CollectionId is empty`);
const vectorModelData = getVectorModel(vectorModel);
if (!vectorModelData) {
return Promise.reject(`Model ${vectorModel} is inValid`);
}
return {
datasetId,
maxToken: vectorModelData.maxToken * 1.5,
model: vectorModelData.model,
weight: vectorModelData.weight
};
}
if (mode === TrainingModeEnum.qa) {
const qaModelData = getQAModel(agentModel);
if (!qaModelData) {
return Promise.reject(`Model ${agentModel} is inValid`);
}
return {
datasetId,
maxToken: qaModelData.maxContext * 0.8,
model: qaModelData.model,
weight: 0
};
}
return Promise.reject(`Mode ${mode} is inValid`);
}
export const config = {
api: {
bodyParser: {

View File

@@ -11,7 +11,7 @@ import { UpdateDatasetDataProps } from '@/global/core/dataset/api';
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
const { id, q = '', a, indexes } = req.body as UpdateDatasetDataProps;
const { id, q = '', a, indexes = [] } = req.body as UpdateDatasetDataProps;
// auth data permission
const {
@@ -23,6 +23,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
} = await authDatasetData({
req,
authToken: true,
authApiKey: true,
dataId: id,
per: 'w'
});

View File

@@ -20,6 +20,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
const { dataset, canWrite, isOwner } = await authDataset({
req,
authToken: true,
authApiKey: true,
datasetId,
per: 'r'
});

View File

@@ -15,7 +15,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
// 凭证校验
const { teamId, tmbId, teamOwner, role, canWrite } = await authUserRole({
req,
authToken: true
authToken: true,
authApiKey: true
});
const datasets = await MongoDataset.find({

View File

@@ -3,14 +3,11 @@ import { jsonRes } from '@fastgpt/service/common/response';
import { request } from '@fastgpt/service/common/api/plusRequest';
import type { Method } from 'axios';
import { setCookie } from '@fastgpt/service/support/permission/controller';
import { getInitConfig } from '../common/system/getInitData';
import { FastGPTProUrl } from '@fastgpt/service/common/system/constants';
import { connectToDatabase } from '@/service/mongo';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
if (!FastGPTProUrl) {
await getInitConfig();
}
await connectToDatabase();
const method = (req.method || 'POST') as Method;
const { path = [], ...query } = req.query as any;

View File

@@ -0,0 +1,39 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { getTeamDatasetValidSub } from '@fastgpt/service/support/wallet/sub/utils';
import { getVectorCountByTeamId } from '@fastgpt/service/common/vectorStore/controller';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
// 凭证校验
const { teamId } = await authCert({
req,
authToken: true
});
const [{ sub, maxSize }, usedSize] = await Promise.all([
getTeamDatasetValidSub({
teamId,
freeSize: global.feConfigs?.subscription?.datasetStoreFreeSize
}),
getVectorCountByTeamId(teamId)
]);
jsonRes(res, {
data: {
sub,
maxSize,
usedSize
}
});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
}

View File

@@ -2,7 +2,8 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { withNextCors } from '@fastgpt/service/common/middle/cors';
import { getUploadModel, removeFilesByPaths } from '@fastgpt/service/common/file/upload/multer';
import { getUploadModel } from '@fastgpt/service/common/file/multer';
import { removeFilesByPaths } from '@fastgpt/service/common/file/utils';
import fs from 'fs';
import { getAIApi } from '@fastgpt/service/core/ai/config';
import { pushWhisperBill } from '@/service/support/wallet/bill/push';

View File

@@ -35,19 +35,17 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
const { tokens, vectors } = await getVectorsByText({ input: query, model });
jsonRes(res, {
data: {
object: 'list',
data: vectors.map((item, index) => ({
object: 'embedding',
index: index,
embedding: item
})),
model,
usage: {
prompt_tokens: tokens,
total_tokens: tokens
}
res.json({
object: 'list',
data: vectors.map((item, index) => ({
object: 'embedding',
index: index,
embedding: item
})),
model,
usage: {
prompt_tokens: tokens,
total_tokens: tokens
}
});

View File

@@ -22,6 +22,7 @@ import MyModal from '@/components/MyModal';
import { useAppStore } from '@/web/core/app/store/useAppStore';
import PermissionRadio from '@/components/support/permission/Radio';
import { useTranslation } from 'next-i18next';
import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants';
const InfoModal = ({
defaultApp,
@@ -45,7 +46,6 @@ const InfoModal = ({
setValue,
getValues,
formState: { errors },
reset,
handleSubmit
} = useForm({
defaultValues: defaultApp
@@ -102,6 +102,7 @@ const InfoModal = ({
if (!file) return;
try {
const src = await compressImgFileAndUpload({
type: MongoImageTypeEnum.appAvatar,
file,
maxW: 300,
maxH: 300
@@ -187,4 +188,4 @@ const InfoModal = ({
);
};
export default InfoModal;
export default React.memo(InfoModal);

View File

@@ -81,7 +81,7 @@ const Logs = ({ appId }: { appId: string }) => {
cursor={'pointer'}
onClick={onOpenMarkDesc}
>
{t('chat.Read Mark Description')}
{t('core.chat.Read Mark Description')}
</Box>
</Box>
</>
@@ -202,9 +202,9 @@ const Logs = ({ appId }: { appId: string }) => {
<MyModal
isOpen={isOpenMarkDesc}
onClose={onCloseMarkDesc}
title={t('chat.Mark Description Title')}
title={t('core.chat.Mark Description Title')}
>
<ModalBody whiteSpace={'pre-wrap'}>{t('chat.Mark Description')}</ModalBody>
<ModalBody whiteSpace={'pre-wrap'}>{t('core.chat.Mark Description')}</ModalBody>
</MyModal>
</Flex>
);

View File

@@ -26,6 +26,7 @@ import Avatar from '@/components/Avatar';
import MyTooltip from '@/components/MyTooltip';
import MyModal from '@/components/MyModal';
import { useTranslation } from 'next-i18next';
import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants';
type FormType = {
avatar: string;
@@ -59,6 +60,7 @@ const CreateModal = ({ onClose, onSuccess }: { onClose: () => void; onSuccess: (
if (!file) return;
try {
const src = await compressImgFileAndUpload({
type: MongoImageTypeEnum.appAvatar,
file,
maxW: 300,
maxH: 300

View File

@@ -35,7 +35,7 @@ const ChatHeader = ({
() =>
chatContentReplaceBlock(history[history.length - 2]?.value)?.slice(0, 8) ||
appName ||
t('chat.New Chat'),
t('core.chat.New Chat'),
[appName, history]
);
@@ -56,8 +56,8 @@ const ChatHeader = ({
<MyIcon name={'history'} w={'14px'} />
<Box ml={1}>
{history.length === 0
? t('chat.Fresh Chat')
: t('chat.History Amount', { amount: history.length })}
? t('core.chat.New Chat')
: t('core.chat.History Amount', { amount: history.length })}
</Box>
</Tag>
{!!chatModels && chatModels.length > 0 && (

View File

@@ -74,18 +74,20 @@ const ChatHistorySlider = ({
// custom title edit
const { onOpenModal, EditModal: EditTitleModal } = useEditTitle({
title: t('chat.Custom History Title'),
placeholder: t('chat.Custom History Title Description')
title: t('core.chat.Custom History Title'),
placeholder: t('core.chat.Custom History Title Description')
});
const { openConfirm, ConfirmModal } = useConfirm({
content: isShare
? t('chat.Confirm to clear share chat history')
: t('chat.Confirm to clear history')
? t('core.chat.Confirm to clear share chat history')
: t('core.chat.Confirm to clear history')
});
const concatHistory = useMemo<HistoryItemType[]>(
() =>
!activeChatId ? [{ id: activeChatId, title: t('chat.New Chat') }].concat(history) : history,
!activeChatId
? [{ id: activeChatId, title: t('core.chat.New Chat') }].concat(history)
: history,
[activeChatId, history, t]
);
@@ -144,7 +146,7 @@ const ChatHistorySlider = ({
mr={2}
list={[
{ label: 'App', id: TabEnum.app },
{ label: 'chat.History', id: TabEnum.history }
{ label: t('core.chat.History'), id: TabEnum.history }
]}
activeId={currentTab}
onChange={(e) => setCurrentTab(e as `${TabEnum}`)}
@@ -160,7 +162,7 @@ const ChatHistorySlider = ({
overflow={'hidden'}
onClick={() => onChangeChat()}
>
{t('chat.New Chat')}
{t('core.chat.New Chat')}
</Button>
{(isPc || isShare) && (
@@ -240,7 +242,7 @@ const ChatHistorySlider = ({
}}
>
<MyIcon mr={2} name={'core/chat/setTopLight'} w={'16px'}></MyIcon>
{item.top ? t('chat.Unpin') : t('chat.Pin')}
{item.top ? t('core.chat.Unpin') : t('core.chat.Pin')}
</MenuItem>
)}
{onSetCustomTitle && (
@@ -336,7 +338,7 @@ const ChatHistorySlider = ({
borderRadius={'50%'}
aria-label={''}
/>
{t('chat.Exit Chat')}
{t('core.chat.Exit Chat')}
</Flex>
)}
<EditTitleModal />

View File

@@ -35,7 +35,7 @@ const SliderApps = ({ appId }: { appId: string }) => {
borderRadius={'50%'}
aria-label={''}
/>
{t('chat.Exit Chat')}
{t('core.chat.Exit Chat')}
</Flex>
</Box>
<Box flex={'1 0 0'} h={0} px={5} overflow={'overlay'}>

View File

@@ -15,7 +15,7 @@ const ToolMenu = ({ history }: { history: ChatItemType[] }) => {
() => [
{
icon: 'core/chat/chatLight',
label: t('chat.New Chat'),
label: t('core.chat.New Chat'),
onClick: () => {
router.replace({
query: {

View File

@@ -86,7 +86,7 @@ const Chat = ({ appId, chatId }: { appId: string; chatId: string }) => {
const newTitle =
chatContentReplaceBlock(prompts[0].content).slice(0, 20) ||
prompts[1]?.value?.slice(0, 20) ||
t('chat.New Chat');
t('core.chat.New Chat');
// new chat
if (completionChatId !== chatId) {
@@ -166,7 +166,7 @@ const Chat = ({ appId, chatId }: { appId: string; chatId: string }) => {
setLastChatAppId('');
setLastChatId('');
toast({
title: getErrText(e, t('chat.Failed to initialize chat')),
title: getErrText(e, t('core.chat.Failed to initialize chat')),
status: 'error'
});
if (e?.code === 501) {
@@ -210,7 +210,7 @@ const Chat = ({ appId, chatId }: { appId: string; chatId: string }) => {
if (apps.length === 0) {
toast({
status: 'error',
title: t('chat.You need to a chat app')
title: t('core.chat.You need to a chat app')
});
router.replace('/app/list');
} else {

View File

@@ -88,7 +88,7 @@ const OutLink = ({
const newTitle =
chatContentReplaceBlock(prompts[0].content).slice(0, 20) ||
prompts[1]?.value?.slice(0, 20) ||
t('chat.New Chat');
t('core.chat.New Chat');
// new chat
if (completionChatId !== chatId) {

View File

@@ -32,16 +32,17 @@ import { useRouter } from 'next/router';
import MyIcon from '@fastgpt/web/components/common/Icon';
import MyInput from '@/components/MyInput';
import { useLoading } from '@/web/common/hooks/useLoading';
import InputDataModal, { RawSourceText, type InputDataType } from '../components/InputDataModal';
import InputDataModal from '../components/InputDataModal';
import RawSourceBox from '@/components/core/dataset/RawSourceBox';
import type { DatasetDataListItemType } from '@/global/core/dataset/type.d';
import { TabEnum } from '..';
import { useUserStore } from '@/web/support/user/useUserStore';
import { TeamMemberRoleEnum } from '@fastgpt/global/support/user/team/constant';
import { getDefaultIndex } from '@fastgpt/global/core/dataset/utils';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import {
DatasetCollectionTypeMap,
DatasetCollectionTrainingTypeMap
TrainingModeEnum,
TrainingTypeMap
} from '@fastgpt/global/core/dataset/constant';
import { formatTime2YMDHM } from '@fastgpt/global/common/string/time';
import { formatFileSize } from '@fastgpt/global/common/file/tools';
@@ -90,7 +91,7 @@ const DataCard = () => {
}
});
const [editInputData, setEditInputData] = useState<InputDataType>();
const [editDataId, setEditDataId] = useState<string>();
// get first page data
const getFirstData = useCallback(
@@ -154,7 +155,7 @@ const DataCard = () => {
},
{
label: t('core.dataset.collection.metadata.Training Type'),
value: t(DatasetCollectionTrainingTypeMap[collection.trainingType]?.label)
value: t(TrainingTypeMap[collection.trainingType]?.label)
},
{
label: t('core.dataset.collection.metadata.Chunk Size'),
@@ -193,7 +194,7 @@ const DataCard = () => {
/>
<Flex className="textEllipsis" flex={'1 0 0'} mr={[3, 5]} alignItems={'center'}>
<Box lineHeight={1.2}>
<RawSourceText
<RawSourceBox
sourceName={collection?.name}
sourceId={collection?.fileId || collection?.rawLink}
fontSize={['md', 'lg']}
@@ -216,10 +217,7 @@ const DataCard = () => {
size={['sm', 'md']}
onClick={() => {
if (!collection) return;
setEditInputData({
q: '',
indexes: [getDefaultIndex({ dataId: `${Date.now()}` })]
});
setEditDataId('');
}}
>
{t('dataset.Insert Data')}
@@ -297,12 +295,7 @@ const DataCard = () => {
}}
onClick={() => {
if (!collection) return;
setEditInputData({
id: item._id,
q: item.q,
a: item.a,
indexes: item.indexes
});
setEditDataId(item._id);
}}
>
<Flex zIndex={1} alignItems={'center'} justifyContent={'space-between'}>
@@ -424,11 +417,11 @@ const DataCard = () => {
</Flex>
)}
{editInputData !== undefined && collection && (
{editDataId !== undefined && collection && (
<InputDataModal
collectionId={collection._id}
defaultValue={editInputData}
onClose={() => setEditInputData(undefined)}
dataId={editDataId}
onClose={() => setEditDataId(undefined)}
onSuccess={() => getData(pageNum)}
onDelete={() => getData(pageNum)}
/>

View File

@@ -4,14 +4,8 @@ import { useSelectFile } from '@/web/common/file/hooks/useSelectFile';
import { useToast } from '@/web/common/hooks/useToast';
import { splitText2Chunks } from '@fastgpt/global/common/string/textSplitter';
import { simpleText } from '@fastgpt/global/common/string/tools';
import {
fileDownload,
readCsvContent,
readPdfContent,
readDocContent
} from '@/web/common/file/utils';
import { readFileRawText, readMdFile, readHtmlFile } from '@fastgpt/web/common/file/read';
import { getUploadMdImgController, uploadFiles } from '@/web/common/file/controller';
import { fileDownload, readCsvContent } from '@/web/common/file/utils';
import { getUploadBase64ImgController, uploadFiles } from '@/web/common/file/controller';
import { Box, Flex, useDisclosure, type BoxProps } from '@chakra-ui/react';
import React, { DragEvent, useCallback, useState } from 'react';
import { useTranslation } from 'next-i18next';
@@ -25,6 +19,8 @@ import { countPromptTokens } from '@fastgpt/global/common/string/tiktoken';
import { DatasetCollectionTypeEnum } from '@fastgpt/global/core/dataset/constant';
import type { PushDatasetDataChunkProps } from '@fastgpt/global/core/dataset/api.d';
import { UrlFetchResponse } from '@fastgpt/global/common/file/api.d';
import { readFileRawContent } from '@fastgpt/web/common/file/read/index';
import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants';
const UrlFetchModal = dynamic(() => import('./UrlFetchModal'));
const CreateFileModal = dynamic(() => import('./CreateFileModal'));
@@ -168,36 +164,22 @@ const FileSelect = ({
}
// parse and upload files
let text = await (async () => {
switch (extension) {
case 'txt':
return readFileRawText(file);
case 'md':
return readMdFile({
file,
uploadImgController: (base64Img) =>
getUploadMdImgController({ base64Img, metadata: { fileId } })
});
case 'html':
return readHtmlFile({
file,
uploadImgController: (base64Img) =>
getUploadMdImgController({ base64Img, metadata: { fileId } })
});
case 'pdf':
return readPdfContent(file);
case 'docx':
return readDocContent(file, {
let { rawText } = await readFileRawContent({
file,
uploadBase64Controller: (base64Img) =>
getUploadBase64ImgController({
base64Img,
type: MongoImageTypeEnum.docImage,
metadata: {
fileId
});
}
return '';
})();
}
})
});
if (text) {
text = simpleText(text);
if (rawText) {
rawText = simpleText(rawText);
const { chunks, tokens } = splitText2Chunks({
text,
text: rawText,
chunkLen,
overlapRatio,
customReg: customSplitChar ? [customSplitChar] : []
@@ -207,7 +189,7 @@ const FileSelect = ({
id: nanoid(),
filename: file.name,
icon,
rawText: text,
rawText,
tokens,
type: DatasetCollectionTypeEnum.file,
fileId,

View File

@@ -10,10 +10,7 @@ const CsvImport = dynamic(() => import('./Csv'), {});
import MyModal from '@/components/MyModal';
import Provider from './Provider';
import { useDatasetStore } from '@/web/core/dataset/store/dataset';
import {
DatasetCollectionTrainingModeEnum,
TrainingModeEnum
} from '@fastgpt/global/core/dataset/constant';
import { TrainingModeEnum } from '@fastgpt/global/core/dataset/constant';
export enum ImportTypeEnum {
chunk = 'chunk',
@@ -46,24 +43,21 @@ const ImportData = ({
chunkOverlapRatio: 0.2,
inputPrice: vectorModel?.inputPrice || 0,
outputPrice: 0,
mode: TrainingModeEnum.chunk,
collectionTrainingType: DatasetCollectionTrainingModeEnum.chunk
collectionTrainingType: TrainingModeEnum.chunk
},
[ImportTypeEnum.qa]: {
defaultChunkLen: agentModel?.maxContext * 0.55 || 8000,
chunkOverlapRatio: 0,
inputPrice: agentModel?.inputPrice || 0,
outputPrice: agentModel?.outputPrice || 0,
mode: TrainingModeEnum.qa,
collectionTrainingType: DatasetCollectionTrainingModeEnum.qa
collectionTrainingType: TrainingModeEnum.qa
},
[ImportTypeEnum.csv]: {
defaultChunkLen: 0,
chunkOverlapRatio: 0,
inputPrice: vectorModel?.inputPrice || 0,
outputPrice: 0,
mode: TrainingModeEnum.chunk,
collectionTrainingType: DatasetCollectionTrainingModeEnum.manual
collectionTrainingType: TrainingModeEnum.chunk
}
};
return map[importType];

View File

@@ -16,10 +16,7 @@ import { splitText2Chunks } from '@fastgpt/global/common/string/textSplitter';
import { hashStr } from '@fastgpt/global/common/string/tools';
import { useToast } from '@/web/common/hooks/useToast';
import { getErrText } from '@fastgpt/global/common/error/utils';
import {
DatasetCollectionTrainingModeEnum,
TrainingModeEnum
} from '@fastgpt/global/core/dataset/constant';
import { TrainingModeEnum } from '@fastgpt/global/core/dataset/constant';
import { Box, Flex, Image, useTheme } from '@chakra-ui/react';
import { CloseIcon } from '@chakra-ui/icons';
import DeleteIcon, { hoverDeleteStyles } from '@fastgpt/web/components/common/Icon/delete';
@@ -104,7 +101,6 @@ const Provider = ({
parentId,
inputPrice,
outputPrice,
mode,
collectionTrainingType,
vectorModel,
agentModel,
@@ -118,8 +114,7 @@ const Provider = ({
parentId: string;
inputPrice: number;
outputPrice: number;
mode: `${TrainingModeEnum}`;
collectionTrainingType: `${DatasetCollectionTrainingModeEnum}`;
collectionTrainingType: `${TrainingModeEnum}`;
vectorModel: string;
agentModel: string;
defaultChunkLen: number;
@@ -147,14 +142,14 @@ const Provider = ({
const totalTokens = useMemo(() => files.reduce((sum, file) => sum + file.tokens, 0), [files]);
const price = useMemo(() => {
if (mode === TrainingModeEnum.qa) {
if (collectionTrainingType === TrainingModeEnum.qa) {
const inputTotal = totalTokens * inputPrice;
const outputTotal = totalTokens * 0.5 * outputPrice;
return formatModelPrice2Read(inputTotal + outputTotal);
}
return formatModelPrice2Read(totalTokens * inputPrice);
}, [inputPrice, mode, outputPrice, totalTokens]);
}, [collectionTrainingType, inputPrice, outputPrice, totalTokens]);
/*
start upload data
@@ -169,7 +164,7 @@ const Provider = ({
for await (const file of files) {
// create training bill
const billId = await postCreateTrainingBill({
name: t('dataset.collections.Create Training Data', { filename: file.filename }),
name: file.filename,
vectorModel,
agentModel
});
@@ -180,11 +175,15 @@ const Provider = ({
parentId,
name: file.filename,
type: file.type,
trainingType: collectionTrainingType,
chunkSize: chunkLen,
chunkSplitter: customSplitChar,
qaPrompt: collectionTrainingType === TrainingModeEnum.qa ? prompt : '',
fileId: file.fileId,
rawLink: file.rawLink,
chunkSize: chunkLen,
trainingType: collectionTrainingType,
qaPrompt: mode === TrainingModeEnum.qa ? prompt : '',
rawTextLength: file.rawText.length,
hashRawText: hashStr(file.rawText),
metadata: file.metadata
@@ -195,8 +194,8 @@ const Provider = ({
const { insertLen } = await chunksUpload({
collectionId,
billId,
trainingMode: collectionTrainingType,
chunks,
mode,
onUploading: (insertLen) => {
setSuccessChunks((state) => state + insertLen);
},

View File

@@ -1,10 +1,9 @@
import React, { useCallback, useState, useMemo } from 'react';
import React, { useState, useMemo } from 'react';
import { useRouter } from 'next/router';
import { Box, Flex, Button, IconButton, Input, Textarea } from '@chakra-ui/react';
import { DeleteIcon } from '@chakra-ui/icons';
import { delDatasetById } from '@/web/core/dataset/api';
import { useSelectFile } from '@/web/common/file/hooks/useSelectFile';
import { useToast } from '@/web/common/hooks/useToast';
import { useDatasetStore } from '@/web/core/dataset/store/dataset';
import { useConfirm } from '@/web/common/hooks/useConfirm';
import { useForm } from 'react-hook-form';
@@ -17,6 +16,7 @@ import PermissionRadio from '@/components/support/permission/Radio';
import MySelect from '@/components/Select';
import { qaModelList } from '@/web/common/system/staticData';
import { useRequest } from '@/web/common/hooks/useRequest';
import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants';
const Info = ({ datasetId }: { datasetId: string }) => {
const { t } = useTranslation();
@@ -70,6 +70,7 @@ const Info = ({ datasetId }: { datasetId: string }) => {
const file = e[0];
if (!file) return Promise.resolve(null);
return compressImgFileAndUpload({
type: MongoImageTypeEnum.datasetAvatar,
file,
maxW: 300,
maxH: 300

View File

@@ -1,44 +1,38 @@
import React, { useMemo, useState } from 'react';
import { Box, Flex, Button, Textarea, BoxProps, Image, useTheme, Grid } from '@chakra-ui/react';
import { Box, Flex, Button, Textarea, useTheme, Grid } from '@chakra-ui/react';
import { useFieldArray, useForm } from 'react-hook-form';
import {
postInsertData2Dataset,
putDatasetDataById,
delOneDatasetDataById,
getDatasetCollectionById
getDatasetCollectionById,
getDatasetDataItemById
} from '@/web/core/dataset/api';
import { useToast } from '@/web/common/hooks/useToast';
import { getErrText } from '@fastgpt/global/common/error/utils';
import MyIcon from '@fastgpt/web/components/common/Icon';
import MyModal from '@/components/MyModal';
import MyTooltip from '@/components/MyTooltip';
import { QuestionOutlineIcon } from '@chakra-ui/icons';
import { useQuery } from '@tanstack/react-query';
import { useTranslation } from 'next-i18next';
import { getFileAndOpen } from '@/web/core/dataset/utils';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { useRequest } from '@/web/common/hooks/useRequest';
import { countPromptTokens } from '@fastgpt/global/common/string/tiktoken';
import { useConfirm } from '@/web/common/hooks/useConfirm';
import { getDefaultIndex, getSourceNameIcon } from '@fastgpt/global/core/dataset/utils';
import { feConfigs, vectorModelList } from '@/web/common/system/staticData';
import { getDefaultIndex } from '@fastgpt/global/core/dataset/utils';
import { vectorModelList } from '@/web/common/system/staticData';
import { DatasetDataIndexTypeEnum } from '@fastgpt/global/core/dataset/constant';
import { DatasetDataIndexItemType } from '@fastgpt/global/core/dataset/type';
import SideTabs from '@/components/SideTabs';
import { useLoading } from '@/web/common/hooks/useLoading';
import DeleteIcon from '@fastgpt/web/components/common/Icon/delete';
import { defaultCollectionDetail } from '@/constants/dataset';
import { getDocPath } from '@/web/common/system/doc';
import RawSourceBox from '@/components/core/dataset/RawSourceBox';
import MyBox from '@/components/common/MyBox';
import { getErrText } from '@fastgpt/global/common/error/utils';
export type RawSourceTextProps = BoxProps & {
sourceName?: string;
sourceId?: string;
canView?: boolean;
};
export type InputDataType = {
id?: string;
q: string;
a?: string;
a: string;
indexes: (Omit<DatasetDataIndexItemType, 'dataId'> & {
dataId?: string; // pg data id
})[];
@@ -53,26 +47,25 @@ enum TabEnum {
const InputDataModal = ({
collectionId,
dataId,
defaultValue,
onClose,
onSuccess,
onDelete
}: {
collectionId: string;
defaultValue: InputDataType;
dataId?: string;
defaultValue?: { q: string; a?: string };
onClose: () => void;
onSuccess: (data: InputDataType) => void;
onSuccess: (data: InputDataType & { dataId: string }) => void;
onDelete?: () => void;
}) => {
const { t } = useTranslation();
const theme = useTheme();
const { toast } = useToast();
const { Loading } = useLoading();
const [currentTab, setCurrentTab] = useState(TabEnum.content);
const { register, handleSubmit, reset, control } = useForm<InputDataType>({
defaultValues: defaultValue
});
const { register, handleSubmit, reset, control } = useForm<InputDataType>();
const {
fields: indexes,
append: appendIndexes,
@@ -89,14 +82,15 @@ const InputDataModal = ({
id: TabEnum.index,
icon: 'kbTest'
},
...(defaultValue.id
...(dataId
? [{ label: t('dataset.data.edit.Delete'), id: TabEnum.delete, icon: 'delete' }]
: []),
{ label: t('dataset.data.edit.Course'), id: TabEnum.doc, icon: 'common/courseLight' }
];
const { ConfirmModal, openConfirm } = useConfirm({
content: t('dataset.data.Delete Tip')
content: t('dataset.data.Delete Tip'),
type: 'delete'
});
const { data: collection = defaultCollectionDetail } = useQuery(
@@ -105,6 +99,37 @@ const InputDataModal = ({
return getDatasetCollectionById(collectionId);
}
);
const { isFetching: isFetchingData } = useQuery(
['getDatasetDataItemById', dataId],
() => {
if (dataId) return getDatasetDataItemById(dataId);
return null;
},
{
onSuccess(res) {
if (res) {
reset({
q: res.q,
a: res.a,
indexes: res.indexes
});
} else if (defaultValue) {
reset({
q: defaultValue.q,
a: defaultValue.a,
indexes: [getDefaultIndex({ dataId: `${Date.now()}` })]
});
}
},
onError(err) {
toast({
status: 'error',
title: getErrText(err)
});
onClose();
}
}
);
const maxToken = useMemo(() => {
const vectorModel =
@@ -130,7 +155,7 @@ const InputDataModal = ({
const data = { ...e };
data.id = await postInsertData2Dataset({
const dataId = await postInsertData2Dataset({
collectionId: collection._id,
q: e.q,
a: e.a,
@@ -140,7 +165,10 @@ const InputDataModal = ({
)
});
return data;
return {
...data,
dataId
};
},
successToast: t('dataset.data.Input Success Tip'),
onSuccess(e) {
@@ -158,17 +186,18 @@ const InputDataModal = ({
// update
const { mutate: onUpdateData, isLoading: isUpdating } = useRequest({
mutationFn: async (e: InputDataType) => {
if (!e.id) return e;
if (!dataId) return e;
// not exactly same
await putDatasetDataById({
id: e.id,
q: e.q,
a: e.a,
indexes: e.indexes
id: dataId,
...e
});
return e;
return {
dataId,
...e
};
},
successToast: t('dataset.data.Update Success Tip'),
errorToast: t('common.error.unKnow'),
@@ -180,8 +209,8 @@ const InputDataModal = ({
// delete
const { mutate: onDeleteData, isLoading: isDeleting } = useRequest({
mutationFn: () => {
if (!onDelete || !defaultValue.id) return Promise.resolve(null);
return delOneDatasetDataById(defaultValue.id);
if (!onDelete || !dataId) return Promise.resolve(null);
return delOneDatasetDataById(dataId);
},
onSuccess() {
if (!onDelete) return;
@@ -192,13 +221,16 @@ const InputDataModal = ({
errorToast: t('common.error.unKnow')
});
const loading = useMemo(() => isImporting || isUpdating, [isImporting, isUpdating]);
const isLoading = useMemo(
() => isImporting || isUpdating || isFetchingData || isDeleting,
[isImporting, isUpdating, isFetchingData, isDeleting]
);
return (
<MyModal isOpen={true} isCentered w={'90vw'} maxW={'1440px'} h={'90vh'}>
<Flex h={'100%'}>
<MyBox isLoading={isLoading} display={'flex'} h={'100%'}>
<Box p={5} borderRight={theme.borders.base}>
<RawSourceText
<RawSourceBox
w={'200px'}
className="textEllipsis3"
whiteSpace={'pre-wrap'}
@@ -224,7 +256,7 @@ const InputDataModal = ({
<Flex flexDirection={'column'} py={3} flex={1} h={'100%'}>
<Box fontSize={'lg'} px={5} fontWeight={'bold'} mb={4}>
{currentTab === TabEnum.content && (
<>{defaultValue.id ? t('dataset.data.Update Data') : t('dataset.data.Input Data')}</>
<>{dataId ? t('dataset.data.Update Data') : t('dataset.data.Input Data')}</>
)}
{currentTab === TabEnum.index && <> {t('dataset.data.Index Edit')}</>}
</Box>
@@ -351,82 +383,24 @@ const InputDataModal = ({
)}
</Box>
<Flex justifyContent={'flex-end'} px={5} mt={4}>
<Button variant={'whitePrimary'} mr={3} isLoading={loading} onClick={onClose}>
<Button variant={'whiteBase'} mr={3} onClick={onClose}>
{t('common.Close')}
</Button>
<MyTooltip label={collection.canWrite ? '' : t('dataset.data.Can not edit')}>
<Button
isDisabled={!collection.canWrite}
isLoading={loading}
// @ts-ignore
onClick={handleSubmit(defaultValue.id ? onUpdateData : sureImportData)}
onClick={handleSubmit(dataId ? onUpdateData : sureImportData)}
>
{defaultValue.id ? t('common.Confirm Update') : t('common.Confirm Import')}
{dataId ? t('common.Confirm Update') : t('common.Confirm Import')}
</Button>
</MyTooltip>
</Flex>
</Flex>
</Flex>
</MyBox>
<ConfirmModal />
<Loading fixed={false} loading={isDeleting} />
</MyModal>
);
};
export default InputDataModal;
export function RawSourceText({
sourceId,
sourceName = '',
canView = true,
...props
}: RawSourceTextProps) {
const { t } = useTranslation();
const { toast } = useToast();
const { setLoading } = useSystemStore();
const canPreview = useMemo(() => !!sourceId && canView, [canView, sourceId]);
const icon = useMemo(() => getSourceNameIcon({ sourceId, sourceName }), [sourceId, sourceName]);
return (
<MyTooltip
label={canPreview ? t('file.Click to view file') || '' : ''}
shouldWrapChildren={false}
>
<Box
color={'myGray.600'}
display={'inline-flex'}
whiteSpace={'nowrap'}
{...(canPreview
? {
cursor: 'pointer',
textDecoration: 'underline',
onClick: async () => {
setLoading(true);
try {
await getFileAndOpen(sourceId as string);
} catch (error) {
toast({
title: t(getErrText(error, 'error.fileNotFound')),
status: 'error'
});
}
setLoading(false);
}
}
: {})}
{...props}
>
<Image src={icon} alt="" w={['14px', '16px']} mr={2} />
<Box
maxW={['200px', '300px']}
className={props.className ?? 'textEllipsis'}
wordBreak={'break-all'}
>
{sourceName || t('common.UnKnow Source')}
</Box>
</Box>
</MyTooltip>
);
}
export default React.memo(InputDataModal);

View File

@@ -233,7 +233,7 @@ const Test = ({ datasetId }: { datasetId: string }) => {
h={'100%'}
resize={'none'}
variant={'unstyled'}
maxLength={datasetDetail.vectorModel.maxToken}
maxLength={datasetDetail.vectorModel?.maxToken}
placeholder={t('core.dataset.test.Test Text Placeholder')}
onFocus={() => setIsFocus(true)}
{...register('inputText', {
@@ -314,7 +314,7 @@ const Test = ({ datasetId }: { datasetId: string }) => {
</Box>
</Box>
{/* result show */}
<Box p={4} h={['auto', '100%']} overflow={'overlay'} flex={'1 0 0'}>
<Box p={4} h={['auto', '100%']} overflow={'overlay'} flex={'1 0 0'} bg={'white'}>
<TestResults datasetTestItem={datasetTestItem} />
</Box>
@@ -384,6 +384,9 @@ const TestHistories = React.memo(function TestHistories({
}}
cursor={'pointer'}
fontSize={'sm'}
{...(item.id === datasetTestItem?.id && {
bg: 'primary.50'
})}
onClick={() => setDatasetTestItem(item)}
>
<Box flex={'0 0 80px'}>

View File

@@ -16,8 +16,6 @@ import { serviceSideProps } from '@/web/common/utils/i18n';
import { useTranslation } from 'next-i18next';
import { getTrainingQueueLen } from '@/web/core/dataset/api';
import MyTooltip from '@/components/MyTooltip';
import { QuestionOutlineIcon } from '@chakra-ui/icons';
import { feConfigs } from '@/web/common/system/staticData';
import Script from 'next/script';
import CollectionCard from './components/CollectionCard';
import { useDatasetStore } from '@/web/core/dataset/store/dataset';
@@ -29,6 +27,7 @@ import {
} from '@fastgpt/global/core/dataset/constant';
import { useConfirm } from '@/web/common/hooks/useConfirm';
import { useRequest } from '@/web/common/hooks/useRequest';
import DatasetTypeTag from '@/components/core/dataset/DatasetTypeTag';
const DataCard = dynamic(() => import('./components/DataCard'), {
ssr: false
@@ -150,50 +149,47 @@ const Detail = ({ datasetId, currentTab }: { datasetId: string; currentTab: `${T
{isPc ? (
<Flex
flexDirection={'column'}
p={4}
py={4}
h={'100%'}
flex={'0 0 200px'}
borderRight={theme.borders.base}
>
<Flex mb={4} alignItems={'center'}>
<Avatar src={datasetDetail.avatar} w={'34px'} borderRadius={'md'} />
<Box ml={2}>
<Box fontWeight={'bold'}>{datasetDetail.name}</Box>
</Box>
</Flex>
{DatasetTypeMap[datasetDetail.type] && (
<Flex alignItems={'center'} pl={2}>
<MyIcon
name={DatasetTypeMap[datasetDetail.type]?.icon as any}
mr={1}
w={'16px'}
/>
<Box flex={1}>{t(DatasetTypeMap[datasetDetail.type]?.label)}</Box>
{datasetDetail.type === DatasetTypeEnum.websiteDataset &&
datasetDetail.status === DatasetStatusEnum.active && (
<MyTooltip label={t('core.dataset.website.Start Sync')}>
<MyIcon
mt={1}
name={'common/refreshLight'}
w={'12px'}
color={'myGray.500'}
cursor={'pointer'}
onClick={() =>
openConfirmSync(
onUpdateDatasetWebsiteConfig,
undefined,
t('core.dataset.website.Confirm Create Tips')
)()
}
/>
</MyTooltip>
)}
<Box px={4} borderBottom={'1px'} borderColor={'myGray.200'} pb={4} mb={4}>
<Flex mb={4} alignItems={'center'}>
<Avatar src={datasetDetail.avatar} w={'34px'} borderRadius={'md'} />
<Box ml={2}>
<Box fontWeight={'bold'}>{datasetDetail.name}</Box>
</Box>
</Flex>
)}
{DatasetTypeMap[datasetDetail.type] && (
<Flex alignItems={'center'} pl={2} justifyContent={'space-between'}>
<DatasetTypeTag type={datasetDetail.type} />
{datasetDetail.type === DatasetTypeEnum.websiteDataset &&
datasetDetail.status === DatasetStatusEnum.active && (
<MyTooltip label={t('core.dataset.website.Start Sync')}>
<MyIcon
mt={1}
name={'common/refreshLight'}
w={'12px'}
color={'myGray.500'}
cursor={'pointer'}
onClick={() =>
openConfirmSync(
onUpdateDatasetWebsiteConfig,
undefined,
t('core.dataset.website.Confirm Create Tips')
)()
}
/>
</MyTooltip>
)}
</Flex>
)}
</Box>
<SideTabs
px={4}
flex={1}
mx={'auto'}
mt={3}
w={'100%'}
list={tabList}
activeId={currentTab}
@@ -201,7 +197,7 @@ const Detail = ({ datasetId, currentTab }: { datasetId: string; currentTab: `${T
setCurrentTab(e);
}}
/>
<Box>
<Box px={4}>
<Box mb={3}>
<Box fontSize={'sm'}>
{t('core.dataset.training.Agent queue')}({agentTrainingMap.tip})
@@ -229,6 +225,7 @@ const Detail = ({ datasetId, currentTab }: { datasetId: string; currentTab: `${T
/>
</Box>
</Box>
<Flex
alignItems={'center'}
cursor={'pointer'}

View File

@@ -19,6 +19,7 @@ import { useTranslation } from 'next-i18next';
import MyRadio from '@/components/common/MyRadio';
import { DatasetTypeEnum } from '@fastgpt/global/core/dataset/constant';
import { feConfigs } from '@/web/common/system/staticData';
import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants';
const CreateModal = ({ onClose, parentId }: { onClose: () => void; parentId?: string }) => {
const { t } = useTranslation();
@@ -49,6 +50,7 @@ const CreateModal = ({ onClose, parentId }: { onClose: () => void; parentId?: st
if (!file) return;
try {
const src = await compressImgFileAndUpload({
type: MongoImageTypeEnum.datasetAvatar,
file,
maxW: 300,
maxH: 300
@@ -62,7 +64,7 @@ const CreateModal = ({ onClose, parentId }: { onClose: () => void; parentId?: st
});
}
},
[setValue, toast]
[setValue, t, toast]
);
/* create a new kb and router to it */

View File

@@ -22,7 +22,7 @@ import {
putDatasetById,
postCreateDataset
} from '@/web/core/dataset/api';
import { checkTeamExportDatasetLimit } from '@/web/support/user/api';
import { checkTeamExportDatasetLimit } from '@/web/support/user/team/api';
import { useTranslation } from 'next-i18next';
import Avatar from '@/components/Avatar';
import MyIcon from '@fastgpt/web/components/common/Icon';
@@ -44,6 +44,7 @@ import PermissionIconText from '@/components/support/permission/IconText';
import { PermissionTypeEnum } from '@fastgpt/global/support/permission/constant';
import { DatasetItemType } from '@fastgpt/global/core/dataset/type';
import ParentPaths from '@/components/common/ParentPaths';
import DatasetTypeTag from '@/components/core/dataset/DatasetTypeTag';
const CreateModal = dynamic(() => import('./component/CreateModal'), { ssr: false });
const MoveModal = dynamic(() => import('./component/MoveModal'), { ssr: false });
@@ -409,8 +410,9 @@ const Kb = () => {
<Box flex={1}>
<PermissionIconText permission={dataset.permission} color={'myGray.600'} />
</Box>
<MyIcon mr={1} name={dataset.icon as any} w={'12px'} />
<Box color={'myGray.500'}>{t(dataset.label)}</Box>
{dataset.type !== DatasetTypeEnum.folder && (
<DatasetTypeTag type={dataset.type} py={1} px={2} />
)}
</Flex>
</Box>
))}

View File

@@ -17,6 +17,7 @@ import { useConfirm } from '@/web/common/hooks/useConfirm';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { CreateOnePluginParams } from '@fastgpt/global/core/plugin/controller';
import { customAlphabet } from 'nanoid';
import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants';
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 12);
export type FormType = CreateOnePluginParams & {
@@ -92,6 +93,7 @@ const CreateModal = ({
if (!file) return;
try {
const src = await compressImgFileAndUpload({
type: MongoImageTypeEnum.pluginAvatar,
file,
maxW: 300,
maxH: 300