fix dataset quick create modal (#5951)

* fix: text split

* remove test

* perf: create dataset modal

* remove log

* fix dataset quick create modal (#5949)

* fix dataset quick create modal

* fix ui

* doc

* fix: text width

---------

Co-authored-by: heheer <heheer@sealos.io>
Author: Archer
Date:   2025-11-18 18:31:28 +08:00
Committed by: GitHub
Commit: 6adee71c7a (parent: 3f47b58a73)

14 changed files with 379 additions and 150 deletions

View File

@@ -21,6 +21,7 @@ description: 'FastGPT V4.14.2 更新说明'
1. 简易应用模板未正常转化。
2. 工具调用中,包含两个以上连续用户选择时候,第二个用户选择异常。
3. 门户中,团队应用类型错误。
+ 4. 应用作为 MCP 导出,被其他应用使用时,全局变量不需要填写。
## 插件

View File

@@ -116,7 +116,7 @@
"document/content/docs/upgrading/4-13/4132.mdx": "2025-10-21T11:46:53+08:00",
"document/content/docs/upgrading/4-14/4140.mdx": "2025-11-06T15:43:00+08:00",
"document/content/docs/upgrading/4-14/4141.mdx": "2025-11-12T12:19:02+08:00",
"document/content/docs/upgrading/4-14/4142.mdx": "2025-11-17T19:34:52+08:00",
"document/content/docs/upgrading/4-14/4142.mdx": "2025-11-17T21:02:39+08:00",
"document/content/docs/upgrading/4-8/40.mdx": "2025-08-02T19:38:37+08:00",
"document/content/docs/upgrading/4-8/41.mdx": "2025-08-02T19:38:37+08:00",
"document/content/docs/upgrading/4-8/42.mdx": "2025-08-02T19:38:37+08:00",

View File

@@ -129,6 +129,7 @@
"dataset": "dataset",
"dataset.Select_dataset_model_tip": "Only knowledge bases with the same index model can be selected",
"dataset.create_dataset_tips": "For more advanced operations, please go to",
"dataset_create_failed": "Knowledge base creation failed",
"dataset_create_success": "The knowledge base was created successfully and files are being indexed in the background.",
"dataset_empty_tips": "You dont have a knowledge base yet, create one first.",
"dataset_search_tool_description": "Call the \"Semantic Search\" and \"Full-text Search\" capabilities to find reference content that may be related to the problem from the \"Knowledge Base\". \nPrioritize calling this tool to assist in answering user questions.",

View File

@@ -132,6 +132,7 @@
"dataset": "知识库",
"dataset.Select_dataset_model_tip": "仅能选择同一个索引模型的知识库",
"dataset.create_dataset_tips": "更多高级操作请前往",
"dataset_create_failed": "知识库创建失败",
"dataset_create_success": "知识库创建成功,正在后台索引文件",
"dataset_empty_tips": "你还没有知识库,先创建一个吧",
"dataset_search_tool_description": "调用“语义检索”和“全文检索”能力,从“知识库”中查找可能与问题相关的参考内容。优先调用该工具来辅助回答用户的问题。",

View File

@@ -128,6 +128,7 @@
"dataset": "知識庫",
"dataset.Select_dataset_model_tip": "僅能選擇同一個索引模型的知識庫",
"dataset.create_dataset_tips": "更多高級操作請前往",
"dataset_create_failed": "知識庫創建失敗",
"dataset_create_success": "知識庫創建成功,正在後台索引文件",
"dataset_empty_tips": "你還沒有知識庫,先創建一個吧",
"dataset_search_tool_description": "呼叫「語意搜尋」和「全文搜尋」功能,從「知識庫」中尋找可能與問題相關的參考內容。優先呼叫這個工具來協助回答使用者的問題。",

View File

@@ -110,8 +110,8 @@ const Navbar = ({ unread }: { unread: number }) => {
[lastChatAppId, lastPane, t, userInfo?.username]
);
- const isSecondNavbarPage = useMemo(() => {
- return ['/plugin'].includes(router.pathname);
+ const isDashboardPage = useMemo(() => {
+ return router.pathname.startsWith('/dashboard');
}, [router.pathname]);
return (
@@ -123,7 +123,7 @@ const Navbar = ({ unread }: { unread: number }) => {
w={'100%'}
userSelect={'none'}
pb={2}
- bg={isSecondNavbarPage ? 'white' : 'transparent'}
+ bg={isDashboardPage ? 'white' : 'transparent'}
>
{/* logo */}
<Box flex={'0 0 auto'} mb={3}>
@@ -147,7 +147,7 @@ const Navbar = ({ unread }: { unread: number }) => {
: {
bg: 'transparent',
_hover: {
- bg: isSecondNavbarPage ? 'white' : 'rgba(255,255,255,0.9)'
+ bg: isDashboardPage ? 'white' : 'rgba(255,255,255,0.9)'
}
})}
{...(item.link !== router.asPath
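The rename above is more than cosmetic: the old check matched an exact allowlist of paths, while the new one matches every route under the dashboard. A minimal illustration (the example paths are assumptions):

```ts
// Old behavior: exact-match allowlist — dashboard subroutes fell through
// to the transparent background.
['/plugin'].includes('/dashboard/apps'); // false

// New behavior: prefix match covers the whole dashboard section.
'/dashboard/apps'.startsWith('/dashboard'); // true
'/account'.startsWith('/dashboard'); // false
```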

View File

@@ -27,7 +27,8 @@ import SearchInput from '@fastgpt/web/components/common/Input/SearchInput';
import { useDatasetSelect } from '@/components/core/dataset/SelectModal';
import FolderPath from '@/components/common/folder/Path';
import EmptyTip from '@fastgpt/web/components/common/EmptyTip';
- import QuickCreateDatasetModal from '@/pageComponents/app/detail/components/QuickCreateModal';
+ import QuickCreateDatasetModal from '@/pageComponents/app/detail/components/QuickCreateDatasetModal';
+ import { useUserStore } from '@/web/support/user/useUserStore';
// Dataset selection modal component
export const DatasetSelectModal = ({
@@ -47,6 +48,7 @@ export const DatasetSelectModal = ({
const [selectedDatasets, setSelectedDatasets] =
useState<SelectedDatasetType[]>(defaultSelectedDatasets);
const { toast } = useToast();
+ const { userInfo } = useUserStore();
// Use server-side search, following the logic of the dataset list page
const {
@@ -154,9 +156,11 @@ export const DatasetSelectModal = ({
<Flex h="100%" direction="column" flex={1} overflow="hidden" minH={0}>
<ModalBody flex={1} h={0} overflow="hidden">
{isRootEmpty ? (
- <VStack mt={8}>
+ <VStack h={'full'} justifyContent={'center'}>
<EmptyTip text={t('app:dataset_empty_tips')} py={4} />
- <Button onClick={onOpenQuickCreate}>{t('common:Create')}</Button>
+ {userInfo?.team?.permission.hasDatasetCreatePer && (
+ <Button onClick={onOpenQuickCreate}>{t('common:Create')}</Button>
+ )}
</VStack>
) : (
<>
@@ -427,7 +431,7 @@ export const DatasetSelectModal = ({
{/* Modal footer button area */}
<ModalFooter>
<HStack spacing={4} w="full" align="center">
- {!isRootEmpty && (
+ {!isRootEmpty && userInfo?.team?.permission.hasDatasetCreatePer && (
<Button
leftIcon={<MyIcon name="common/addLight" w={4} />}
variant={'transparentBase'}
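Both changes in this file gate the create entry points on the same team permission. A minimal sketch of the pattern — the Chakra `Button` import and the component wrapper are assumptions for illustration:

```tsx
import { Button } from '@chakra-ui/react';
import { useUserStore } from '@/web/support/user/useUserStore';

// Render a create entry only when the member's team role grants dataset
// creation; otherwise render nothing rather than a button that would fail.
const CreateDatasetEntry = ({ onOpen }: { onOpen: () => void }) => {
  const { userInfo } = useUserStore();
  if (!userInfo?.team?.permission.hasDatasetCreatePer) return null;
  return <Button onClick={onOpen}>Create</Button>;
};
```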

View File

@@ -408,17 +408,15 @@ const RenderList = React.memo(function RenderList({
borderRadius={'sm'}
flexShrink={0}
/>
- <Box
- px={3}
- color={'myGray.900'}
- fontWeight={'500'}
- fontSize={'sm'}
- maxW={'180px'}
- whiteSpace={'nowrap'}
- overflow={'hidden'}
- textOverflow={'ellipsis'}
- >
- {t(parseI18nString(template.name, i18n.language))}
- </Box>
+ <Box flex={'1 0 0'} ml={3}>
+ <Box
+ color={'myGray.900'}
+ fontWeight={'500'}
+ fontSize={'sm'}
+ className="textEllipsis"
+ >
+ {t(parseI18nString(template.name, i18n.language))}
+ </Box>
+ </Box>
<Box flex={1} />
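The old box truncated against a hard `maxW={'180px'}`; the new wrapper hands the title whatever width the row has left. A sketch of why the wrapper matters, assuming the `textEllipsis` utility class expands to the usual nowrap/hidden/ellipsis trio (`title` is a hypothetical prop standing in for the template name):

```tsx
import { Box } from '@chakra-ui/react';

// text-overflow: ellipsis only applies once an ancestor constrains the width.
// flex={'1 0 0'} sizes the wrapper from the row's leftover space rather than
// from the text, so the inner box truncates instead of widening the row.
const TruncatedTitle = ({ title }: { title: string }) => (
  <Box flex={'1 0 0'} ml={3}>
    <Box whiteSpace={'nowrap'} overflow={'hidden'} textOverflow={'ellipsis'}>
      {title}
    </Box>
  </Box>
);
```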

View File

@@ -1,4 +1,4 @@
- import React, { useState } from 'react';
+ import React, { useState, useMemo } from 'react';
import { useTranslation } from 'next-i18next';
import { useForm } from 'react-hook-form';
import {
@@ -18,21 +18,10 @@ import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { useUploadAvatar } from '@fastgpt/web/common/file/hooks/useUploadAvatar';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
- import {
- postCreateDataset,
- getDatasetById,
- postCreateDatasetFileCollection
- } from '@/web/core/dataset/api';
+ import { postCreateDatasetWithFiles, getDatasetById } from '@/web/core/dataset/api';
import { getUploadAvatarPresignedUrl } from '@/web/common/file/api';
import { uploadFile2DB } from '@/web/common/file/controller';
import { useSystemStore } from '@/web/common/system/useSystemStore';
- import {
- ChunkSettingModeEnum,
- ChunkTriggerConfigTypeEnum,
- DataChunkSplitModeEnum,
- DatasetCollectionDataProcessModeEnum,
- DatasetTypeEnum
- } from '@fastgpt/global/core/dataset/constants';
import { getWebDefaultEmbeddingModel, getWebDefaultLLMModel } from '@/web/common/system/utils';
import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
import { getErrText } from '@fastgpt/global/common/error/utils';
@@ -66,6 +55,11 @@ const QuickCreateDatasetModal = ({
const [selectFiles, setSelectFiles] = useState<ImportSourceItemType[]>([]);
+ const successFiles = useMemo(
+ () => selectFiles.filter((item) => item.dbFileId && !item.errorMsg),
+ [selectFiles]
+ );
const { register, handleSubmit, watch, setValue } = useForm({
defaultValues: {
parentId,
@@ -83,103 +77,106 @@ const QuickCreateDatasetModal = ({
}
});
- const handleSelectFiles = (files: SelectFileItemType[]) => {
-   setSelectFiles((state) => [
-     ...state,
-     ...files.map<ImportSourceItemType>((selectFile) => {
-       const { fileId, file } = selectFile;
-       return {
-         id: fileId,
-         createStatus: 'waiting',
-         file,
-         sourceName: file.name,
-         sourceSize: formatFileSize(file.size),
-         icon: getFileIcon(file.name),
-         uploadedFileRate: 0
-       };
-     })
-   ]);
- };
- const uploadSingleFile = async (fileItem: ImportSourceItemType, datasetId: string) => {
-   try {
-     if (!fileItem.file) return;
-     setSelectFiles((prev) =>
-       prev.map((item) => (item.id === fileItem.id ? { ...item, uploadedFileRate: 0 } : item))
-     );
-     const { fileId } = await uploadFile2DB({
-       file: fileItem.file,
-       bucketName: BucketNameEnum.dataset,
-       data: { datasetId },
-       percentListen: (percent) => {
-         setSelectFiles((prev) =>
-           prev.map((item) =>
-             item.id === fileItem.id
-               ? { ...item, uploadedFileRate: Math.max(percent, item.uploadedFileRate || 0) }
-               : item
-           )
-         );
-       }
-     });
-     await postCreateDatasetFileCollection({
-       datasetId,
-       fileId,
-       trainingType: DatasetCollectionDataProcessModeEnum.chunk,
-       chunkTriggerType: ChunkTriggerConfigTypeEnum.minSize,
-       chunkTriggerMinSize: 1000,
-       chunkSettingMode: ChunkSettingModeEnum.auto,
-       chunkSplitMode: DataChunkSplitModeEnum.paragraph,
-       chunkSize: 1024,
-       indexSize: 512,
-       customPdfParse: false
-     });
-     setSelectFiles((prev) =>
-       prev.map((item) =>
-         item.id === fileItem.id ? { ...item, dbFileId: fileId, uploadedFileRate: 100 } : item
-       )
-     );
-   } catch (error) {
-     setSelectFiles((prev) =>
-       prev.map((item) =>
-         item.id === fileItem.id ? { ...item, errorMsg: getErrText(error) } : item
-       )
-     );
-   }
- };
+ const { runAsync: handleSelectFiles, loading: uploading } = useRequest2(
+   async (files: SelectFileItemType[]) => {
+     await Promise.all(
+       files.map(async ({ fileId, file }) => {
+         try {
+           const { fileId: uploadFileId } = await uploadFile2DB({
+             file,
+             bucketName: BucketNameEnum.dataset,
+             data: { datasetId: '' },
+             percentListen: (percent) => {
+               setSelectFiles((state) =>
+                 state.map((item) =>
+                   item.id === fileId
+                     ? {
+                         ...item,
+                         uploadedFileRate: item.uploadedFileRate
+                           ? Math.max(percent, item.uploadedFileRate)
+                           : percent
+                       }
+                     : item
+                 )
+               );
+             }
+           });
+           setSelectFiles((state) =>
+             state.map((item) =>
+               item.id === fileId
+                 ? {
+                     ...item,
+                     dbFileId: uploadFileId,
+                     isUploading: false,
+                     uploadedFileRate: 100
+                   }
+                 : item
+             )
+           );
+         } catch (error) {
+           setSelectFiles((state) =>
+             state.map((item) =>
+               item.id === fileId
+                 ? {
+                     ...item,
+                     isUploading: false,
+                     errorMsg: getErrText(error)
+                   }
+                 : item
+             )
+           );
+         }
+       })
+     );
+   },
+   {
+     manual: true,
+     onBefore([files]) {
+       setSelectFiles((state) => [
+         ...state,
+         ...files.map<ImportSourceItemType>((selectFile) => {
+           const { fileId, file } = selectFile;
+           return {
+             id: fileId,
+             createStatus: 'waiting',
+             file,
+             sourceName: file.name,
+             sourceSize: formatFileSize(file.size),
+             icon: getFileIcon(file.name),
+             isUploading: true,
+             uploadedFileRate: 0
+           };
+         })
+       ]);
+     }
+   }
+ );
const { runAsync: onCreate, loading: isCreating } = useRequest2(
async (data) => {
- const datasetId = await postCreateDataset({
- name: data.name.trim(),
- avatar: data.avatar,
- intro: '',
- parentId,
- type: DatasetTypeEnum.dataset,
- vectorModel: defaultVectorModel,
- agentModel: defaultAgentModel,
- vlmModel: defaultVLLM
- });
+ return await postCreateDatasetWithFiles({
+ datasetParams: {
+ name: data.name.trim(),
+ avatar: data.avatar,
+ parentId,
+ vectorModel: defaultVectorModel,
+ agentModel: defaultAgentModel,
+ vlmModel: defaultVLLM
+ },
+ files: selectFiles
+ .filter((item) => item.dbFileId && !item.errorMsg)
+ .map((item) => ({
+ fileId: item.dbFileId!,
+ name: item.sourceName
+ }))
+ });
- if (selectFiles.length > 0) {
- await Promise.all(selectFiles.map((file) => uploadSingleFile(file, datasetId)));
- }
- const datasetDetail = await getDatasetById(datasetId);
- return {
- datasetId,
- name: datasetDetail.name,
- avatar: datasetDetail.avatar,
- vectorModel: datasetDetail.vectorModel
- };
},
{
manual: true,
successToast: t('app:dataset_create_success'),
+ errorToast: t('app:dataset_create_failed'),
onSuccess: (result) => {
onSuccess(result);
onClose();
@@ -198,18 +195,19 @@ const QuickCreateDatasetModal = ({
>
<ModalBody py={6} minH={'500px'}>
<Box mb={6}>
- <FormLabel mb={2}>{t('common:app_icon_and_name')}</FormLabel>
+ <FormLabel mb={2}>{t('common:input_name')}</FormLabel>
<Flex alignItems={'center'}>
<MyTooltip label={t('common:set_avatar')}>
- <Avatar
- src={avatar}
- w={9}
- h={9}
- mr={4}
- borderRadius={'8px'}
- cursor={'pointer'}
- onClick={handleAvatarSelectorOpen}
- />
+ <Box w={9} h={9} mr={4}>
+ <Avatar
+ src={avatar}
+ w={'full'}
+ h={'full'}
+ borderRadius={'8px'}
+ cursor={'pointer'}
+ onClick={handleAvatarSelectorOpen}
+ />
+ </Box>
</MyTooltip>
<FormControl flex={1}>
<Input
@@ -288,7 +286,7 @@ const QuickCreateDatasetModal = ({
) : null}
</Flex>
<Flex w={1 / 5} justifyContent={'end'}>
- {!item.uploadedFileRate && (
+ {!item.isUploading && (
<Flex alignItems={'center'} justifyContent={'center'} w={6} h={6}>
<MyIcon
name={'delete'}
@@ -331,7 +329,7 @@ const QuickCreateDatasetModal = ({
</Button>
<Button
isLoading={isCreating}
- isDisabled={selectFiles.length === 0}
+ isDisabled={successFiles.length === 0 || uploading}
onClick={handleSubmit(onCreate)}
>
{t('common:Create')}
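The refactor's key move is seeding the file list in `useRequest2`'s `onBefore`, which runs synchronously with the call: rows appear (flagged `isUploading`) before the first byte is sent, and the hook's `loading` flag doubles as the `uploading` state that keeps the Create button disabled. A stripped-down sketch of that shape — the hook options follow the usage visible in the diff, row fields are reduced to what the logic reads, and progress reporting is elided:

```ts
import { useState } from 'react';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import { uploadFile2DB } from '@/web/common/file/controller';
import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
import { getErrText } from '@fastgpt/global/common/error/utils';

// Reduced row shape: just the fields the enable/disable logic reads.
type Row = { id: string; isUploading: boolean; dbFileId?: string; errorMsg?: string };

const [rows, setRows] = useState<Row[]>([]); // inside a component in practice

const { runAsync: selectFiles, loading: uploading } = useRequest2(
  async (files: { fileId: string; file: File }[]) => {
    // Upload in parallel; each file settles its own row on success or failure.
    await Promise.all(
      files.map(async ({ fileId, file }) => {
        try {
          const { fileId: dbFileId } = await uploadFile2DB({
            file,
            bucketName: BucketNameEnum.dataset,
            data: { datasetId: '' }, // dataset does not exist yet
            percentListen: () => {} // progress handling elided
          });
          setRows((s) => s.map((r) => (r.id === fileId ? { ...r, dbFileId, isUploading: false } : r)));
        } catch (error) {
          setRows((s) =>
            s.map((r) => (r.id === fileId ? { ...r, isUploading: false, errorMsg: getErrText(error) } : r))
          );
        }
      })
    );
  },
  {
    manual: true,
    // onBefore receives runAsync's arguments and runs before the async body,
    // so rows render, flagged isUploading, before any network work starts.
    onBefore([files]) {
      setRows((s) => [...s, ...files.map(({ fileId }) => ({ id: fileId, isUploading: true }))]);
    }
  }
);
```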

View File

@@ -349,7 +349,7 @@ const CollectionChunkForm = ({ form }: { form: UseFormReturn<CollectionChunkForm
desc: t('dataset:custom_data_process_params_desc'),
value: ChunkSettingModeEnum.custom,
children: chunkSettingMode === ChunkSettingModeEnum.custom && (
- <Box mt={5}>
+ <Box>
<Box>
<RadioGroup<DataChunkSplitModeEnum>
list={[
@@ -377,8 +377,8 @@ const CollectionChunkForm = ({ form }: { form: UseFormReturn<CollectionChunkForm
{chunkSplitMode === DataChunkSplitModeEnum.paragraph && (
<>
- <Box mt={3}>
- <Box fontSize={'sm'}>{t('dataset:llm_paragraph_mode')}</Box>
+ <Box mt={3} fontSize={'sm'}>
+ <Box mb={1}>{t('dataset:llm_paragraph_mode')}</Box>
<MySelect<ParagraphChunkAIModeEnum>
size={'sm'}
bg={'myGray.50'}
@@ -406,7 +406,7 @@ const CollectionChunkForm = ({ form }: { form: UseFormReturn<CollectionChunkForm
/>
</Box>
<Box mt={2} fontSize={'sm'}>
- <Box>{t('dataset:paragraph_max_deep')}</Box>
+ <Box mb={1}>{t('dataset:paragraph_max_deep')}</Box>
<MyNumberInput
size={'sm'}
bg={'myGray.50'}
@@ -419,7 +419,7 @@ const CollectionChunkForm = ({ form }: { form: UseFormReturn<CollectionChunkForm
/>
</Box>
<Box mt={2} fontSize={'sm'}>
- <Box>{t('dataset:max_chunk_size')}</Box>
+ <Box mb={1}>{t('dataset:max_chunk_size')}</Box>
<Box
css={{
'& > span': {
@@ -478,7 +478,7 @@ const CollectionChunkForm = ({ form }: { form: UseFormReturn<CollectionChunkForm
{chunkSplitMode === DataChunkSplitModeEnum.char && (
<Box mt={3} fontSize={'sm'}>
- <Box>{t('dataset:custom_split_char')}</Box>
+ <Box mb={1}>{t('dataset:custom_split_char')}</Box>
<HStack>
<Box flex={'1 0 0'}>
<MySelect<string>

View File

@@ -13,12 +13,14 @@ import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { type OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { createFileToken } from '@fastgpt/service/support/permission/auth/file';
+ import { authUserPer } from '@fastgpt/service/support/permission/user/auth';
+ import { TeamDatasetCreatePermissionVal } from '@fastgpt/global/support/permission/user/constant';
export type UploadChatFileProps = {
appId: string;
} & OutLinkChatAuthProps;
export type UploadDatasetFileProps = {
- datasetId: string;
+ datasetId?: string;
};
const authUploadLimit = (tmbId: string) => {
@@ -58,18 +60,32 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
};
}
if (bucketName === 'dataset') {
- const chatData = data as UploadDatasetFileProps;
- const authData = await authDataset({
- datasetId: chatData.datasetId,
- per: WritePermissionVal,
- req,
- authToken: true,
- authApiKey: true
- });
- return {
- teamId: authData.teamId,
- uid: authData.tmbId
- };
+ const datasetData = data as UploadDatasetFileProps;
+ if (datasetData.datasetId) {
+ const authData = await authDataset({
+ datasetId: datasetData.datasetId,
+ per: WritePermissionVal,
+ req,
+ authToken: true,
+ authApiKey: true
+ });
+ return {
+ teamId: authData.teamId,
+ uid: authData.tmbId
+ };
+ } else {
+ const authData = await authUserPer({
+ req,
+ authToken: true,
+ authApiKey: true,
+ per: TeamDatasetCreatePermissionVal
+ });
+ return {
+ teamId: authData.teamId,
+ uid: authData.tmbId
+ };
+ }
}
return Promise.reject('bucketName is empty');
})();
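On the client, the now-optional `datasetId` is what lets the quick-create modal upload before any dataset exists. A sketch of that staging call — the call shape is taken from the modal diff above; `stageFile` itself is a hypothetical helper:

```ts
import { uploadFile2DB } from '@/web/common/file/controller';
import { BucketNameEnum } from '@fastgpt/global/common/file/constants';

// Stage a file before any dataset exists: the empty datasetId makes the
// handler above fall back to authUserPer + TeamDatasetCreatePermissionVal
// instead of per-dataset authDataset, so only members allowed to create
// datasets can stage files.
async function stageFile(file: File, onProgress: (percent: number) => void) {
  const { fileId } = await uploadFile2DB({
    file,
    bucketName: BucketNameEnum.dataset,
    data: { datasetId: '' },
    percentListen: onProgress
  });
  return fileId;
}
```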

View File

@@ -0,0 +1,203 @@
import { NextAPI } from '@/service/middleware/entry';
import { parseParentIdInMongo } from '@fastgpt/global/common/parentFolder/utils';
import {
DatasetTypeEnum,
DatasetCollectionTypeEnum,
DatasetCollectionDataProcessModeEnum,
ChunkTriggerConfigTypeEnum,
ChunkSettingModeEnum,
DataChunkSplitModeEnum
} from '@fastgpt/global/core/dataset/constants';
import {
OwnerRoleVal,
PerResourceTypeEnum,
WritePermissionVal
} from '@fastgpt/global/support/permission/constant';
import { TeamDatasetCreatePermissionVal } from '@fastgpt/global/support/permission/user/constant';
import { pushTrack } from '@fastgpt/service/common/middle/tracks/utils';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
import {
getDefaultEmbeddingModel,
getDefaultLLMModel,
getDefaultVLMModel,
getEmbeddingModel
} from '@fastgpt/service/core/ai/model';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { checkTeamDatasetLimit } from '@fastgpt/service/support/permission/teamLimit';
import { authUserPer } from '@fastgpt/service/support/permission/user/auth';
import type { ApiRequestProps } from '@fastgpt/service/type/next';
import { addAuditLog } from '@fastgpt/service/support/user/audit/util';
import { AuditEventEnum } from '@fastgpt/global/support/user/audit/constants';
import { getI18nDatasetType } from '@fastgpt/service/support/user/audit/util';
import { MongoResourcePermission } from '@fastgpt/service/support/permission/schema';
import { getS3AvatarSource } from '@fastgpt/service/common/s3/sources/avatar';
import { createCollectionAndInsertData } from '@fastgpt/service/core/dataset/collection/controller';
import { getFileById, delFileByFileIdList } from '@fastgpt/service/common/file/gridfs/controller';
import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
import type { EmbeddingModelItemType } from '@fastgpt/global/core/ai/model.d';
export type DatasetCreateWithFilesQuery = {};
export type DatasetCreateWithFilesBody = {
datasetParams: {
name: string;
avatar: string;
parentId?: string;
vectorModel?: string;
agentModel?: string;
vlmModel?: string;
};
files: {
fileId: string;
name: string;
}[];
};
export type DatasetCreateWithFilesResponse = {
datasetId: string;
name: string;
avatar: string;
vectorModel: EmbeddingModelItemType;
};
async function handler(
req: ApiRequestProps<DatasetCreateWithFilesBody, DatasetCreateWithFilesQuery>
): Promise<DatasetCreateWithFilesResponse> {
const { datasetParams, files } = req.body;
const {
parentId,
name,
avatar,
vectorModel = getDefaultEmbeddingModel()?.model,
agentModel = getDefaultLLMModel()?.model,
vlmModel = getDefaultVLMModel()?.model
} = datasetParams;
const { teamId, tmbId, userId } = parentId
? await authDataset({
req,
datasetId: parentId,
authToken: true,
authApiKey: true,
per: WritePermissionVal
})
: await authUserPer({
req,
authToken: true,
authApiKey: true,
per: TeamDatasetCreatePermissionVal
});
// check limit
await checkTeamDatasetLimit(teamId);
try {
const result = await mongoSessionRun(async (session) => {
// 1. Create dataset
const [dataset] = await MongoDataset.create(
[
{
...parseParentIdInMongo(parentId),
name,
teamId,
tmbId,
vectorModel,
agentModel,
vlmModel,
avatar,
intro: '',
type: DatasetTypeEnum.dataset
}
],
{ session, ordered: true }
);
// 2. Create permission
await MongoResourcePermission.insertOne({
teamId,
tmbId,
resourceId: dataset._id,
permission: OwnerRoleVal,
resourceType: PerResourceTypeEnum.dataset
});
// 3. Refresh avatar
await getS3AvatarSource().refreshAvatar(avatar, undefined, session);
// 4. Create collections for each file
for (const file of files) {
const gridFile = await getFileById({
bucketName: BucketNameEnum.dataset,
fileId: file.fileId
});
if (!gridFile) {
return Promise.reject(CommonErrEnum.fileNotFound);
}
await createCollectionAndInsertData({
dataset,
createCollectionParams: {
datasetId: dataset._id,
teamId,
tmbId,
type: DatasetCollectionTypeEnum.file,
name: file.name || gridFile.filename,
fileId: file.fileId,
metadata: {
relatedImgId: file.fileId
},
trainingType: DatasetCollectionDataProcessModeEnum.chunk,
chunkTriggerType: ChunkTriggerConfigTypeEnum.minSize,
chunkTriggerMinSize: 1000,
chunkSettingMode: ChunkSettingModeEnum.auto,
chunkSplitMode: DataChunkSplitModeEnum.paragraph,
chunkSize: 1024,
indexSize: 512,
customPdfParse: false
},
session
});
}
return {
datasetId: dataset._id,
name: dataset.name,
avatar: dataset.avatar,
vectorModel: getEmbeddingModel(dataset.vectorModel)
};
});
// Track and audit log
pushTrack.createDataset({
type: DatasetTypeEnum.dataset,
teamId,
tmbId,
uid: userId
});
(async () => {
addAuditLog({
tmbId,
teamId,
event: AuditEventEnum.CREATE_DATASET,
params: {
datasetName: name,
datasetType: getI18nDatasetType(DatasetTypeEnum.dataset)
}
});
})();
return result;
} catch (error) {
const fileIds = files.map((file) => file.fileId);
await delFileByFileIdList({
bucketName: BucketNameEnum.dataset,
fileIdList: fileIds
});
return Promise.reject(error);
}
}
export default NextAPI(handler);
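One detail of this handler worth noting: the Mongo writes are transactional, but the GridFS files were uploaded before the endpoint ran, so a rollback cannot reclaim them. The condensed failure-handling shape, with `createEverything` as a hypothetical stand-in for the dataset/permission/collection steps:

```ts
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
import { delFileByFileIdList } from '@fastgpt/service/common/file/gridfs/controller';
import { BucketNameEnum } from '@fastgpt/global/common/file/constants';

async function createWithCleanup(
  files: { fileId: string }[],
  createEverything: (session: unknown) => Promise<unknown>
) {
  try {
    return await mongoSessionRun(async (session) => {
      // All Mongo writes share `session`; any rejection aborts the transaction.
      return createEverything(session);
    });
  } catch (error) {
    // The rollback does not touch GridFS: the files were uploaded before this
    // endpoint ran, so delete them explicitly to avoid orphans.
    await delFileByFileIdList({
      bucketName: BucketNameEnum.dataset,
      fileIdList: files.map((file) => file.fileId)
    });
    return Promise.reject(error);
  }
}
```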

View File

@@ -6,7 +6,6 @@ import { type DatasetItemType } from '@fastgpt/global/core/dataset/type';
import { type ApiRequestProps } from '@fastgpt/service/type/next';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
import { getDatasetSyncDatasetStatus } from '@fastgpt/service/core/dataset/datasetSync';
- import { DatasetStatusEnum, DatasetTypeEnum } from '@fastgpt/global/core/dataset/constants';
import { filterApiDatasetServerPublicData } from '@fastgpt/global/core/dataset/apiDataset/utils';
type Query = {

View File

@@ -79,6 +79,10 @@ import type {
GetApiDatasetPathResponse
} from '@/pages/api/core/dataset/apiDataset/getPathNames';
import type { DelCollectionBody } from '@/pages/api/core/dataset/collection/delete';
+ import type {
+ DatasetCreateWithFilesBody,
+ DatasetCreateWithFilesResponse
+ } from '@/pages/api/core/dataset/createWithFiles';
/* ======================== dataset ======================= */
export const getDatasets = (data: GetDatasetListBody) =>
@@ -100,6 +104,9 @@ export const getDatasetById = (id: string) => GET<DatasetItemType>(`/core/datase
export const postCreateDataset = (data: CreateDatasetParams) =>
POST<string>(`/core/dataset/create`, data);
+ export const postCreateDatasetWithFiles = (data: DatasetCreateWithFilesBody) =>
+ POST<DatasetCreateWithFilesResponse>(`/core/dataset/createWithFiles`, data);
export const putDatasetById = (data: DatasetUpdateBody) => PUT<void>(`/core/dataset/update`, data);
export const delDatasetById = (id: string) => DELETE(`/core/dataset/delete?id=${id}`);
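End to end, a hypothetical quick-create flow from the web layer (the dataset name, file name, and avatar URL are placeholders):

```ts
import { uploadFile2DB } from '@/web/common/file/controller';
import { postCreateDatasetWithFiles } from '@/web/core/dataset/api';
import { BucketNameEnum } from '@fastgpt/global/common/file/constants';

// Stage the file with no datasetId (team create permission is checked
// server-side), then create the dataset and one chunked collection per
// staged file in a single request.
async function quickCreate(file: File, avatarUrl: string) {
  const { fileId } = await uploadFile2DB({
    file,
    bucketName: BucketNameEnum.dataset,
    data: { datasetId: '' },
    percentListen: () => {}
  });

  const created = await postCreateDatasetWithFiles({
    datasetParams: { name: file.name, avatar: avatarUrl },
    files: [{ fileId, name: file.name }]
  });

  return created; // { datasetId, name, avatar, vectorModel }
}
```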