Mirror of https://github.com/labring/FastGPT.git (synced 2025-07-27 00:17:31 +00:00)
File input (#2270)
* doc
* feat: file upload config
* perf: chat box file params
* feat: markdown show file
* feat: chat file store and clear
* perf: read file contentType
* feat: llm vision config
* feat: file url output
* perf: plugin error text
* perf: image load
* feat: ai chat document
* perf: file block ui
* feat: read file node
* feat: file read response field
* feat: simple mode support read files
* feat: tool call
* feat: read file histories
* perf: select file
* perf: select file config
* i18n
* i18n
* fix: ts; feat: tool response preview result
This commit is contained in:
167  projects/app/public/imgs/app/fileUploadPlaceholder.svg  Normal file
File diff suppressed because one or more lines are too long
After Width: | Height: | Size: 1.7 MiB
@@ -1,9 +1,9 @@
import React, { useState } from 'react';
import { Skeleton } from '@chakra-ui/react';
import { ImageProps, Skeleton } from '@chakra-ui/react';
import MyPhotoView from '@fastgpt/web/components/common/Image/PhotoView';
import { useBoolean } from 'ahooks';

const MdImage = ({ src }: { src?: string }) => {
const MdImage = ({ src, ...props }: { src?: string } & ImageProps) => {
const [isLoaded, { setTrue }] = useBoolean(false);

const [renderSrc, setRenderSrc] = useState(src);
@@ -31,6 +31,7 @@ const MdImage = ({ src }: { src?: string }) => {
setRenderSrc('/imgs/errImg.png');
setTrue();
}}
{...props}
/>
</Skeleton>
);

@@ -6,8 +6,8 @@ import { useTranslation } from 'next-i18next';
const VariableTip = (props: StackProps) => {
const { t } = useTranslation();
return (
<HStack fontSize={'xs'} spacing={1} {...props}>
<MyIcon name={'common/info'} w={'0.9rem'} transform={'translateY(1px)'} />
<HStack fontSize={'11px'} spacing={1} {...props}>
<MyIcon name={'common/info'} w={'0.8rem'} />
<Box>{t('common:textarea_variable_picker_tip')}</Box>
</HStack>
);
@@ -41,8 +41,11 @@ const AIChatSettingsModal = ({
});
const model = watch('model');
const showResponseAnswerText = watch(NodeInputKeyEnum.aiChatIsResponseText) !== undefined;
const showVisionSwitch = watch(NodeInputKeyEnum.aiChatVision) !== undefined;
const showMaxHistoriesSlider = watch('maxHistories') !== undefined;
const useVision = watch('aiChatVision');
const selectedModel = llmModelList.find((item) => item.model === model) || llmModelList[0];
const llmSupportVision = !!selectedModel?.vision;

const tokenLimit = useMemo(() => {
return llmModelList.find((item) => item.model === model)?.maxResponse || 4096;
@@ -65,7 +68,7 @@ const AIChatSettingsModal = ({
alignItems: 'center',
fontSize: 'sm',
color: 'myGray.900',
width: ['80px', '90px']
width: ['6rem', '8rem']
};

return (
@@ -110,26 +113,24 @@ const AIChatSettingsModal = ({
</Box>
</Flex>
{feConfigs && (
<Flex mt={8}>
<Flex mt={6}>
<Box {...LabelStyles} mr={2}>
{t('common:core.ai.Ai point price')}
</Box>
<Box flex={1} ml={'10px'}>
{t('support.wallet.Ai point every thousand tokens', {
<Box flex={1}>
{t('common:support.wallet.Ai point every thousand tokens', {
points: selectedModel?.charsPointsPrice || 0
})}
</Box>
</Flex>
)}
<Flex mt={8}>
<Flex mt={6}>
<Box {...LabelStyles} mr={2}>
{t('common:core.ai.Max context')}
</Box>
<Box flex={1} ml={'10px'}>
{selectedModel?.maxContext || 4096}Tokens
</Box>
<Box flex={1}>{selectedModel?.maxContext || 4096}Tokens</Box>
</Flex>
<Flex mt={8}>
<Flex mt={6}>
<Box {...LabelStyles} mr={2}>
{t('common:core.ai.Support tool')}
<QuestionTip ml={1} label={t('common:core.module.template.AI support tool tip')} />
@@ -140,11 +141,11 @@ const AIChatSettingsModal = ({
: t('common:common.not_support')}
</Box>
</Flex>
<Flex mt={8}>
<Flex mt={6}>
<Box {...LabelStyles} mr={2}>
{t('common:core.app.Temperature')}
</Box>
<Box flex={1} ml={'10px'}>
<Box flex={1} ml={1}>
<MySlider
markList={[
{ label: t('common:core.app.deterministic'), value: 0 },
@@ -161,11 +162,11 @@ const AIChatSettingsModal = ({
/>
</Box>
</Flex>
<Flex mt={8}>
<Flex mt={6}>
<Box {...LabelStyles} mr={2}>
{t('common:core.app.Max tokens')}
</Box>
<Box flex={1} ml={'10px'}>
<Box flex={1}>
<MySlider
markList={[
{ label: '100', value: 100 },
@@ -184,11 +185,11 @@ const AIChatSettingsModal = ({
</Box>
</Flex>
{showMaxHistoriesSlider && (
<Flex mt={8}>
<Flex mt={6}>
<Box {...LabelStyles} mr={2}>
{t('common:core.app.Max histories')}
</Box>
<Box flex={1} ml={'10px'}>
<Box flex={1}>
<MySlider
markList={[
{ label: 0, value: 0 },
@@ -207,7 +208,7 @@ const AIChatSettingsModal = ({
</Flex>
)}
{showResponseAnswerText && (
<Flex mt={8} alignItems={'center'}>
<Flex mt={6} alignItems={'center'}>
<Box {...LabelStyles}>
{t('common:core.app.Ai response')}
<QuestionTip
@@ -215,7 +216,7 @@ const AIChatSettingsModal = ({
label={t('common:core.module.template.AI response switch tip')}
></QuestionTip>
</Box>
<Box flex={1} ml={'10px'}>
<Box flex={1}>
<Switch
isChecked={getValues(NodeInputKeyEnum.aiChatIsResponseText)}
onChange={(e) => {
@@ -227,6 +228,29 @@ const AIChatSettingsModal = ({
</Box>
</Flex>
)}
{showVisionSwitch && (
<Flex mt={6} alignItems={'center'}>
<Box {...LabelStyles}>
{t('app:llm_use_vision')}
<QuestionTip ml={1} label={t('app:llm_use_vision_tip')}></QuestionTip>
</Box>
<Box flex={1}>
{llmSupportVision ? (
<Switch
isChecked={useVision}
onChange={(e) => {
const value = e.target.checked;
setValue(NodeInputKeyEnum.aiChatVision, value);
}}
/>
) : (
<Box fontSize={'sm'} color={'myGray.500'}>
{t('app:llm_not_support_vision')}
</Box>
)}
</Box>
</Flex>
)}
</ModalBody>
<ModalFooter>
<Button variant={'whiteBase'} onClick={onClose}>
@@ -1,13 +1,15 @@
import React, { useEffect } from 'react';
import React from 'react';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { LLMModelTypeEnum, llmModelTypeFilterMap } from '@fastgpt/global/core/ai/constants';
import { Box, Button, Flex, css, useDisclosure } from '@chakra-ui/react';
import { Box, Button, css, useDisclosure } from '@chakra-ui/react';
import type { SettingAIDataType } from '@fastgpt/global/core/app/type.d';
import AISettingModal from '@/components/core/ai/AISettingModal';
import Avatar from '@fastgpt/web/components/common/Avatar';
import { HUGGING_FACE_ICON } from '@fastgpt/global/common/system/constants';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import { useTranslation } from 'next-i18next';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { useMount } from 'ahooks';

type Props = {
llmModelType?: `${LLMModelTypeEnum}`;
@@ -37,14 +39,15 @@ const SettingLLMModel = ({ llmModelType = LLMModelTypeEnum.all, defaultData, onC
onClose: onCloseAIChatSetting
} = useDisclosure();

useEffect(() => {
// Set default model
useMount(() => {
if (!model && modelList.length > 0) {
onChange({
...defaultData,
model: modelList[0].model
});
}
}, []);
});

return (
<Box
@@ -71,10 +74,13 @@ const SettingLLMModel = ({ llmModelType = LLMModelTypeEnum.all, defaultData, onC
w={'18px'}
/>
}
rightIcon={<MyIcon name={'common/select'} w={'1rem'} />}
pl={4}
onClick={onOpenAIChatSetting}
>
{selectedModel?.name}
<Box flex={1} textAlign={'left'}>
{selectedModel?.name}
</Box>
</Button>
</MyTooltip>
{isOpenAIChatSetting && (
147
projects/app/src/components/core/app/FileSelect.tsx
Normal file
147
projects/app/src/components/core/app/FileSelect.tsx
Normal file
@@ -0,0 +1,147 @@
|
||||
import MyIcon from '@fastgpt/web/components/common/Icon';
|
||||
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
|
||||
import {
|
||||
Box,
|
||||
Button,
|
||||
Flex,
|
||||
ModalBody,
|
||||
useDisclosure,
|
||||
Image,
|
||||
HStack,
|
||||
Switch,
|
||||
ModalFooter
|
||||
} from '@chakra-ui/react';
|
||||
import React, { useMemo } from 'react';
|
||||
import { useTranslation } from 'next-i18next';
|
||||
import type { AppFileSelectConfigType } from '@fastgpt/global/core/app/type.d';
|
||||
import MyModal from '@fastgpt/web/components/common/MyModal';
|
||||
import MySlider from '@/components/Slider';
|
||||
import { defaultAppSelectFileConfig } from '@fastgpt/global/core/app/constants';
|
||||
import ChatFunctionTip from './Tip';
|
||||
import FormLabel from '@fastgpt/web/components/common/MyBox/FormLabel';
|
||||
import { useMount } from 'ahooks';
|
||||
|
||||
const FileSelect = ({
|
||||
forbidVision = false,
|
||||
value = defaultAppSelectFileConfig,
|
||||
onChange
|
||||
}: {
|
||||
forbidVision?: boolean;
|
||||
value?: AppFileSelectConfigType;
|
||||
onChange: (e: AppFileSelectConfigType) => void;
|
||||
}) => {
|
||||
const { t } = useTranslation();
|
||||
const { isOpen, onOpen, onClose } = useDisclosure();
|
||||
|
||||
const formLabel = useMemo(
|
||||
() =>
|
||||
value.canSelectFile || value.canSelectImg
|
||||
? t('common:core.app.whisper.Open')
|
||||
: t('common:core.app.whisper.Close'),
|
||||
[t, value.canSelectFile, value.canSelectImg]
|
||||
);
|
||||
|
||||
// Close select img switch when vision is forbidden
|
||||
useMount(() => {
|
||||
if (forbidVision) {
|
||||
onChange({
|
||||
...value,
|
||||
canSelectImg: false
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
return (
|
||||
<Flex alignItems={'center'}>
|
||||
<MyIcon name={'core/app/simpleMode/file'} mr={2} w={'20px'} />
|
||||
<FormLabel>{t('app:file_upload')}</FormLabel>
|
||||
<ChatFunctionTip type={'file'} />
|
||||
<Box flex={1} />
|
||||
<MyTooltip label={t('app:config_file_upload')}>
|
||||
<Button
|
||||
variant={'transparentBase'}
|
||||
iconSpacing={1}
|
||||
size={'sm'}
|
||||
mr={'-5px'}
|
||||
onClick={onOpen}
|
||||
>
|
||||
{formLabel}
|
||||
</Button>
|
||||
</MyTooltip>
|
||||
<MyModal
|
||||
iconSrc="core/app/simpleMode/file"
|
||||
title={t('app:file_upload')}
|
||||
isOpen={isOpen}
|
||||
onClose={onClose}
|
||||
>
|
||||
<ModalBody>
|
||||
<HStack>
|
||||
<FormLabel flex={'1 0 0'}>{t('app:document_upload')}</FormLabel>
|
||||
<Switch
|
||||
isChecked={value.canSelectFile}
|
||||
onChange={(e) => {
|
||||
onChange({
|
||||
...value,
|
||||
canSelectFile: e.target.checked
|
||||
});
|
||||
}}
|
||||
/>
|
||||
</HStack>
|
||||
<HStack mt={6}>
|
||||
<FormLabel flex={'1 0 0'}>{t('app:image_upload')}</FormLabel>
|
||||
{forbidVision ? (
|
||||
<Box fontSize={'sm'} color={'myGray.500'}>
|
||||
{t('app:llm_not_support_vision')}
|
||||
</Box>
|
||||
) : (
|
||||
<Switch
|
||||
isChecked={value.canSelectImg}
|
||||
onChange={(e) => {
|
||||
onChange({
|
||||
...value,
|
||||
canSelectImg: e.target.checked
|
||||
});
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
</HStack>
|
||||
{!forbidVision && (
|
||||
<Box mt={2} color={'myGray.500'} fontSize={'xs'}>
|
||||
{t('app:image_upload_tip')}
|
||||
</Box>
|
||||
)}
|
||||
|
||||
<Box mt={6}>
|
||||
<FormLabel>{t('app:upload_file_max_amount')}</FormLabel>
|
||||
<Box mt={5}>
|
||||
<MySlider
|
||||
markList={[
|
||||
{ label: '1', value: 1 },
|
||||
{ label: '20', value: 20 }
|
||||
]}
|
||||
width={'100%'}
|
||||
min={1}
|
||||
max={20}
|
||||
step={1}
|
||||
value={value.maxFiles ?? 5}
|
||||
onChange={(e) => {
|
||||
onChange({
|
||||
...value,
|
||||
maxFiles: e
|
||||
});
|
||||
}}
|
||||
/>
|
||||
</Box>
|
||||
</Box>
|
||||
</ModalBody>
|
||||
<ModalFooter>
|
||||
<Button onClick={onClose} px={8}>
|
||||
{t('common:common.Confirm')}
|
||||
</Button>
|
||||
</ModalFooter>
|
||||
</MyModal>
|
||||
</Flex>
|
||||
);
|
||||
};
|
||||
|
||||
export default FileSelect;
|
@@ -9,7 +9,8 @@ enum FnTypeEnum {
|
||||
nextQuestion = 'nextQuestion',
|
||||
tts = 'tts',
|
||||
variable = 'variable',
|
||||
welcome = 'welcome'
|
||||
welcome = 'welcome',
|
||||
file = 'file'
|
||||
}
|
||||
|
||||
const ChatFunctionTip = ({ type }: { type: `${FnTypeEnum}` }) => {
|
||||
@@ -46,6 +47,12 @@ const ChatFunctionTip = ({ type }: { type: `${FnTypeEnum}` }) => {
|
||||
title: t('common:core.app.Welcome Text'),
|
||||
desc: t('common:core.app.tip.welcomeTextTip'),
|
||||
imgUrl: '/imgs/app/welcome.svg'
|
||||
},
|
||||
[FnTypeEnum.file]: {
|
||||
icon: '/imgs/app/welcome-icon.svg',
|
||||
title: t('app:file_upload'),
|
||||
desc: t('app:file_upload_tip'),
|
||||
imgUrl: '/imgs/app/fileUploadPlaceholder.svg'
|
||||
}
|
||||
});
|
||||
const data = map.current[type];
|
||||
|
@@ -1,16 +1,14 @@
import { useSpeech } from '@/web/common/hooks/useSpeech';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { Box, Flex, Image, Spinner, Textarea } from '@chakra-ui/react';
import React, { useRef, useEffect, useCallback } from 'react';
import { Box, Flex, HStack, Image, Spinner, Textarea } from '@chakra-ui/react';
import React, { useRef, useEffect, useCallback, useMemo } from 'react';
import { useTranslation } from 'next-i18next';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import MyIcon from '@fastgpt/web/components/common/Icon';
import { useSelectFile } from '@/web/common/file/hooks/useSelectFile';
import { compressImgFileAndUpload } from '@/web/common/file/controller';
import { uploadFile2DB } from '@/web/common/file/controller';
import { ChatFileTypeEnum } from '@fastgpt/global/core/chat/constants';
import { addDays } from 'date-fns';
import { useRequest } from '@fastgpt/web/hooks/useRequest';
import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import { ChatBoxInputFormType, ChatBoxInputType, UserInputFileItemType } from '../type';
import { textareaMinH } from '../constants';
import { UseFormReturn, useFieldArray } from 'react-hook-form';
@@ -19,103 +17,167 @@ import dynamic from 'next/dynamic';
import { useContextSelector } from 'use-context-selector';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { useSystem } from '@fastgpt/web/hooks/useSystem';
import { documentFileType } from '@fastgpt/global/common/file/constants';
import { getFileIcon } from '@fastgpt/global/common/file/icon';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { clone } from 'lodash';
import { formatFileSize } from '@fastgpt/global/common/file/tools';

const InputGuideBox = dynamic(() => import('./InputGuideBox'));

const fileTypeFilter = (file: File) => {
return (
file.type.includes('image') ||
documentFileType.split(',').some((type) => file.name.endsWith(type.trim()))
);
};

const ChatInput = ({
onSendMessage,
onStop,
TextareaDom,
showFileSelector = false,
resetInputVal,
chatForm,
appId
}: {
onSendMessage: (val: ChatBoxInputType & { autoTTSResponse?: boolean }) => void;
onStop: () => void;
showFileSelector?: boolean;
TextareaDom: React.MutableRefObject<HTMLTextAreaElement | null>;
resetInputVal: (val: ChatBoxInputType) => void;
chatForm: UseFormReturn<ChatBoxInputFormType>;
appId: string;
}) => {
const { isPc } = useSystem();
const { toast } = useToast();
const { t } = useTranslation();
const { feConfigs } = useSystemStore();

const { setValue, watch, control } = chatForm;
const inputValue = watch('input');
const {
update: updateFile,
remove: removeFile,
update: updateFiles,
remove: removeFiles,
fields: fileList,
append: appendFile,
replace: replaceFile
replace: replaceFiles
} = useFieldArray({
control,
name: 'files'
});

const { isChatting, whisperConfig, autoTTSResponse, chatInputGuide, outLinkAuthData } =
useContextSelector(ChatBoxContext, (v) => v);
const { whisperModel } = useSystemStore();
const { isPc } = useSystem();

const canvasRef = useRef<HTMLCanvasElement>(null);
const { t } = useTranslation();
const {
chatId,
isChatting,
whisperConfig,
autoTTSResponse,
chatInputGuide,
outLinkAuthData,
fileSelectConfig
} = useContextSelector(ChatBoxContext, (v) => v);

const havInput = !!inputValue || fileList.length > 0;
const hasFileUploading = fileList.some((item) => !item.url);
const canSendMessage = havInput && !hasFileUploading;

const showSelectFile = fileSelectConfig.canSelectFile;
const showSelectImg = fileSelectConfig.canSelectImg;
const maxSelectFiles = fileSelectConfig.maxFiles ?? 10;
const maxSize = (feConfigs?.uploadFileMaxSize || 1024) * 1024 * 1024; // nkb
const { icon: selectFileIcon, tooltip: selectFileTip } = useMemo(() => {
if (showSelectFile) {
return {
icon: 'core/chat/fileSelect',
tooltip: t('chat:select_file')
};
} else if (showSelectImg) {
return {
icon: 'core/chat/fileSelect',
tooltip: t('chat:select_img')
};
}
return {};
}, [showSelectFile, showSelectImg, t]);

/* file selector and upload */
const { File, onOpen: onOpenSelectFile } = useSelectFile({
fileType: 'image/*',
fileType: `${showSelectImg ? 'image/*,' : ''} ${showSelectFile ? documentFileType : ''}`,
multiple: true,
maxCount: 10
maxCount: maxSelectFiles
});
const { mutate: uploadFile } = useRequest({
mutationFn: async ({ file, fileIndex }: { file: UserInputFileItemType; fileIndex: number }) => {
if (file.type === ChatFileTypeEnum.image && file.rawFile) {
useRequest2(
async () => {
const filterFiles = fileList.filter((item) => item.status === 0);

if (filterFiles.length === 0) return;

replaceFiles(fileList.map((item) => ({ ...item, status: 1 })));

for (const file of filterFiles) {
if (!file.rawFile) continue;

try {
const url = await compressImgFileAndUpload({
type: MongoImageTypeEnum.chatImage,
const { fileId, previewUrl } = await uploadFile2DB({
file: file.rawFile,
maxW: 4320,
maxH: 4320,
maxSize: 1024 * 1024 * 16,
// 7 day expired.
expiredTime: addDays(new Date(), 7),
...outLinkAuthData
bucketName: 'chat',
metadata: {
chatId
}
});
updateFile(fileIndex, {

updateFiles(fileList.findIndex((item) => item.id === file.id)!, {
...file,
url
status: 1,
url: `${location.origin}${previewUrl}`
});
} catch (error) {
removeFile(fileIndex);
removeFiles(fileList.findIndex((item) => item.id === file.id)!);
console.log(error);
return Promise.reject(error);
}
}
},
errorToast: t('common:common.Upload File Failed')
});
{
manual: false,
errorToast: t('common:upload_file_error'),
refreshDeps: [fileList]
}
);
const onSelectFile = useCallback(
async (files: File[]) => {
if (!files || files.length === 0) {
return;
}
// filter max files
if (fileList.length + files.length > maxSelectFiles) {
files = files.slice(0, maxSelectFiles - fileList.length);
toast({
status: 'warning',
title: t('chat:file_amount_over', { max: maxSelectFiles })
});
}

const filterFilesByMaxSize = files.filter((file) => file.size <= maxSize);
if (filterFilesByMaxSize.length < files.length) {
toast({
status: 'warning',
title: t('file:some_file_size_exceeds_limit', { maxSize: formatFileSize(maxSize) })
});
}

const loadFiles = await Promise.all(
files.map(
filterFilesByMaxSize.map(
(file) =>
new Promise<UserInputFileItemType>((resolve, reject) => {
if (file.type.includes('image')) {
const reader = new FileReader();
reader.readAsDataURL(file);
reader.onload = () => {
const item = {
const item: UserInputFileItemType = {
id: getNanoid(6),
rawFile: file,
type: ChatFileTypeEnum.image,
name: file.name,
icon: reader.result as string
icon: reader.result as string,
status: 0
};
resolve(item);
};
@@ -128,22 +190,28 @@ const ChatInput = ({
rawFile: file,
type: ChatFileTypeEnum.file,
name: file.name,
icon: 'file/pdf'
icon: getFileIcon(file.name),
status: 0
});
}
})
)
);
appendFile(loadFiles);

loadFiles.forEach((file, i) =>
uploadFile({
file,
fileIndex: i + fileList.length
// Document, image
const concatFileList = clone(
fileList.concat(loadFiles).sort((a, b) => {
if (a.type === ChatFileTypeEnum.image && b.type === ChatFileTypeEnum.file) {
return 1;
} else if (a.type === ChatFileTypeEnum.file && b.type === ChatFileTypeEnum.image) {
return -1;
}
return 0;
})
);
replaceFiles(concatFileList);
},
[appendFile, fileList.length, uploadFile]
[fileList, maxSelectFiles, replaceFiles, toast, t]
);

/* on send */
@@ -155,10 +223,12 @@ const ChatInput = ({
text: textareaValue.trim(),
files: fileList
});
replaceFile([]);
replaceFiles([]);
};

/* whisper init */
const { whisperModel } = useSystemStore();
const canvasRef = useRef<HTMLCanvasElement>(null);
const {
isSpeaking,
isTransCription,
@@ -194,12 +264,12 @@ const ChatInput = ({
files: fileList,
autoTTSResponse
});
replaceFile([]);
replaceFiles([]);
} else {
resetInputVal({ text });
}
},
[autoTTSResponse, fileList, onSendMessage, replaceFile, resetInputVal, whisperConfig?.autoSend]
[autoTTSResponse, fileList, onSendMessage, replaceFiles, resetInputVal, whisperConfig?.autoSend]
);
const onWhisperRecord = useCallback(() => {
if (isSpeaking) {
@@ -261,13 +331,20 @@ const ChatInput = ({
</Flex>

{/* file preview */}
<Flex wrap={'wrap'} px={[2, 4]} userSelect={'none'}>
<Flex
wrap={'wrap'}
px={[2, 4]}
userSelect={'none'}
gap={2}
mb={fileList.length > 0 ? 2 : 0}
>
{fileList.map((item, index) => (
<Box
key={item.id}
border={'1px solid rgba(0,0,0,0.12)'}
mr={2}
mb={2}
border={'1px solid #E8EBF0'}
boxShadow={
'0px 2.571px 6.429px 0px rgba(19, 51, 107, 0.08), 0px 0px 0.643px 0px rgba(19, 51, 107, 0.08)'
}
rounded={'md'}
position={'relative'}
_hover={{
@@ -297,13 +374,13 @@ const ChatInput = ({
h={'16px'}
color={'myGray.700'}
cursor={'pointer'}
_hover={{ color: 'primary.500' }}
_hover={{ color: 'red.500' }}
position={'absolute'}
bg={'white'}
right={'-8px'}
top={'-8px'}
onClick={() => {
removeFile(index);
removeFiles(index);
}}
className="close-icon"
display={['', 'none']}
@@ -312,19 +389,27 @@ const ChatInput = ({
<Image
alt={'img'}
src={item.icon}
w={['50px', '70px']}
h={['50px', '70px']}
w={['2rem', '3rem']}
h={['2rem', '3rem']}
borderRadius={'md'}
objectFit={'contain'}
/>
)}
{item.type === ChatFileTypeEnum.file && (
<HStack minW={['100px', '150px']} maxW={'250px'} p={2}>
<MyIcon name={item.icon as any} w={['1.5rem', '2rem']} h={['1.5rem', '2rem']} />
<Box flex={'1 0 0'} className="textEllipsis" fontSize={'xs'}>
{item.name}
</Box>
</HStack>
)}
</Box>
))}
</Flex>

<Flex alignItems={'flex-end'} mt={fileList.length > 0 ? 1 : 0} pl={[2, 4]}>
{/* file selector */}
{showFileSelector && (
{(showSelectFile || showSelectImg) && (
<Flex
h={'22px'}
alignItems={'center'}
@@ -336,8 +421,8 @@ const ChatInput = ({
onOpenSelectFile();
}}
>
<MyTooltip label={t('common:core.chat.Select Image')}>
<MyIcon name={'core/chat/fileSelect'} w={'18px'} color={'myGray.600'} />
<MyTooltip label={selectFileTip}>
<MyIcon name={selectFileIcon as any} w={'18px'} color={'myGray.600'} />
</MyTooltip>
<File onSelect={onSelectFile} />
</Flex>
@@ -404,12 +489,19 @@ const ChatInput = ({
}}
onPaste={(e) => {
const clipboardData = e.clipboardData;
if (clipboardData && showFileSelector) {
if (clipboardData && (showSelectFile || showSelectImg)) {
const items = clipboardData.items;
const files = Array.from(items)
.map((item) => (item.kind === 'file' ? item.getAsFile() : undefined))
.filter(Boolean) as File[];
.filter((file) => {
console.log(file);
return file && fileTypeFilter(file);
}) as File[];
onSelectFile(files);

if (files.length > 0) {
e.stopPropagation();
}
}
}}
/>
@@ -3,6 +3,7 @@ import { useAudioPlay } from '@/web/common/utils/voice';
import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';
import {
AppChatConfigType,
AppFileSelectConfigType,
AppTTSConfigType,
AppWhisperConfigType,
ChatInputGuideConfigType,
@@ -10,6 +11,7 @@ import {
} from '@fastgpt/global/core/app/type';
import { ChatHistoryItemResType, ChatSiteItemType } from '@fastgpt/global/core/chat/type';
import {
defaultAppSelectFileConfig,
defaultChatInputGuideConfig,
defaultTTSConfig,
defaultWhisperConfig
@@ -64,6 +66,7 @@ type useChatStoreType = OutLinkChatAuthProps &
chatInputGuide: ChatInputGuideConfigType;
outLinkAuthData: OutLinkChatAuthProps;
getHistoryResponseData: ({ dataId }: { dataId: string }) => Promise<ChatHistoryItemResType[]>;
fileSelectConfig: AppFileSelectConfigType;
};

export const ChatBoxContext = createContext<useChatStoreType>({
@@ -146,7 +149,8 @@ const Provider = ({
questionGuide = false,
ttsConfig = defaultTTSConfig,
whisperConfig = defaultWhisperConfig,
chatInputGuide = defaultChatInputGuideConfig
chatInputGuide = defaultChatInputGuideConfig,
fileSelectConfig = defaultAppSelectFileConfig
} = useMemo(() => chatConfig, [chatConfig]);

const outLinkAuthData = useMemo(
@@ -215,6 +219,7 @@ const Provider = ({
allVariableList: variables,
questionGuide,
ttsConfig,
fileSelectConfig,
whisperConfig,
autoTTSResponse,
startSegmentedAudio,

@@ -73,12 +73,11 @@
const ContentCard = useMemo(() => {
if (type === 'Human') {
const { text, files = [] } = formatChatValue2InputType(chat.value);

return (
<>
<Flex flexDirection={'column'} gap={4}>
{files.length > 0 && <FilesBlock files={files} />}
<Markdown source={text} />
</>
{text && <Markdown source={text} />}
</Flex>
);
}
@@ -1,22 +1,89 @@
import { Box, Flex, Grid } from '@chakra-ui/react';
import { Box, Flex, Grid, Text } from '@chakra-ui/react';
import MdImage from '@/components/Markdown/img/Image';
import { UserInputFileItemType } from '@/components/core/chat/ChatContainer/ChatBox/type';
import MyIcon from '@fastgpt/web/components/common/Icon';
import React, { useCallback, useLayoutEffect, useMemo, useRef, useState } from 'react';
import { clone } from 'lodash';
import { ChatFileTypeEnum } from '@fastgpt/global/core/chat/constants';
import { useSystem } from '@fastgpt/web/hooks/useSystem';
import { useWidthVariable } from '@fastgpt/web/hooks/useWidthVariable';

const FilesBlock = ({ files }: { files: UserInputFileItemType[] }) => {
const chartRef = useRef<HTMLDivElement>(null);
const [width, setWidth] = useState(400);
const { isPc } = useSystem();
const gridColumns = useWidthVariable({
width,
widthList: [300, 500, 700],
list: ['1fr', 'repeat(2, 1fr)', 'repeat(3, 1fr)']
});

// sort files, file->image
const sortFiles = useMemo(() => {
return clone(files).sort((a, b) => {
if (a.type === ChatFileTypeEnum.image && b.type === ChatFileTypeEnum.file) {
return 1;
} else if (a.type === ChatFileTypeEnum.file && b.type === ChatFileTypeEnum.image) {
return -1;
}
return 0;
});
}, [files]);

const computedChatItemWidth = useCallback(() => {
if (!chartRef.current) return;

// Keep walking up until we reach the parent markdown element
let parent = chartRef.current?.parentElement;
while (parent && !parent.className.includes('chat-box-card')) {
parent = parent.parentElement;
}

const clientWidth = parent?.clientWidth ?? 400;
setWidth(clientWidth);
return parent;
}, [isPc]);

useLayoutEffect(() => {
computedChatItemWidth();
}, [computedChatItemWidth]);

return (
<Grid gridTemplateColumns={['1fr', '1fr 1fr']} gap={4}>
{files.map(({ id, type, name, url }, i) => {
if (type === 'image') {
return (
<Box key={i} rounded={'md'} flex={'1 0 0'} minW={'120px'}>
<MdImage src={url} />
</Box>
);
}
return null;
})}
<Grid ref={chartRef} gridTemplateColumns={gridColumns} gap={4} alignItems={'flex-start'}>
{sortFiles.map(({ id, type, name, url, icon }, i) => (
<Box key={i} bg={'white'} borderRadius={'md'} overflow="hidden">
{type === 'image' && <MdImage src={url} minH={'100px'} my={0} />}
{type === 'file' && (
<Flex
p={2}
w={'100%'}
alignItems="center"
cursor={'pointer'}
onClick={() => {
window.open(url);
}}
>
<MyIcon
name={icon as any}
flexShrink={0}
w={['1.5rem', '2rem']}
h={['1.5rem', '2rem']}
/>
<Text
ml={2}
fontSize={'xs'}
overflow="hidden"
textOverflow="ellipsis"
whiteSpace="nowrap"
>
{name || url}
</Text>
</Flex>
)}
</Box>
))}
</Grid>
);
};

export default FilesBlock;
export default React.memo(FilesBlock);
@@ -75,7 +75,6 @@ type Props = OutLinkChatAuthProps &
showVoiceIcon?: boolean;
showEmptyIntro?: boolean;
userAvatar?: string;
showFileSelector?: boolean;
active?: boolean; // can use
appId: string;

@@ -105,7 +104,6 @@ const ChatBox = (
showEmptyIntro = false,
appAvatar,
userAvatar,
showFileSelector,
active = true,
appId,
chatId,
@@ -378,7 +376,9 @@ const ChatBox = (
return;
}

// Abort the previous request
abortRequest();
questionGuideController.current?.abort('stop');

text = text.trim();

@@ -390,14 +390,13 @@ const ChatBox = (
return;
}

// delete invalid variables, keep only the variables defined in variableList
// Only declared variables are kept
const requestVariables: Record<string, any> = {};
allVariableList?.forEach((item) => {
requestVariables[item.key] = variables[item.key] || '';
});

const responseChatId = getNanoid(24);
questionGuideController.current?.abort('stop');

// set auto audio playing
if (autoTTSResponse) {
@@ -980,7 +979,6 @@ const ChatBox = (
onStop={() => chatController.current?.abort('stop')}
TextareaDom={TextareaDom}
resetInputVal={resetInputVal}
showFileSelector={showFileSelector}
chatForm={chatForm}
appId={appId}
/>

@@ -13,6 +13,7 @@ export type UserInputFileItemType = {
type: `${ChatFileTypeEnum}`;
name: string;
icon: string; // img is base64
status: 0 | 1; // 0: uploading, 1: success
url?: string;
};
@@ -1,6 +1,7 @@
import { ChatItemValueItemType } from '@fastgpt/global/core/chat/type';
import { ChatBoxInputType, UserInputFileItemType } from './type';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { getFileIcon } from '@fastgpt/global/common/file/icon';

export const formatChatValue2InputType = (value?: ChatItemValueItemType[]): ChatBoxInputType => {
if (!value) {
@@ -15,15 +16,16 @@ export const formatChatValue2InputType = (value?: ChatItemValueItemType[]): Chat
.filter((item) => item.text?.content)
.map((item) => item.text?.content || '')
.join('');

const files =
(value
.map((item) =>
?.map((item) =>
item.type === 'file' && item.file
? {
id: getNanoid(),
id: item.file.url,
type: item.file.type,
name: item.file.name,
icon: '',
icon: getFileIcon(item.file.name),
url: item.file.url
}
: undefined

@@ -105,19 +105,19 @@ ${JSON.stringify(questionGuides)}`;
overflowY={'auto'}
>
{toolParams && toolParams !== '{}' && (
<Markdown
source={`~~~json#Input
${toolParams}`}
/>
)}
{toolResponse && (
<Box mt={3}>
<Box mb={3}>
<Markdown
source={`~~~json#Response
${toolResponse}`}
source={`~~~json#Input
${toolParams}`}
/>
</Box>
)}
{toolResponse && (
<Markdown
source={`~~~json#Response
${toolResponse}`}
/>
)}
</AccordionPanel>
</AccordionItem>
</Accordion>
@@ -1,5 +1,5 @@
import React, { useMemo, useState } from 'react';
import { Box, Flex, BoxProps, useDisclosure } from '@chakra-ui/react';
import { Box, Flex, BoxProps, useDisclosure, HStack } from '@chakra-ui/react';
import type { ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d';
import { useTranslation } from 'next-i18next';
import { moduleTemplatesFlat } from '@fastgpt/global/core/workflow/template/constants';
@@ -16,6 +16,7 @@ import MyIcon from '@fastgpt/web/components/common/Icon';
import { useContextSelector } from 'use-context-selector';
import { ChatBoxContext } from '../ChatContainer/ChatBox/Provider';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import { getFileIcon } from '@fastgpt/global/common/file/icon';

type sideTabItemType = {
moduleLogo?: string;
@@ -34,7 +35,7 @@ function RowRender({
}: { children: React.ReactNode; label: string } & BoxProps) {
return (
<Box mb={3}>
<Box fontSize={'sm'} mb={mb} flex={'0 0 90px'}>
<Box fontSize={'sm'} mb={mb} color={'myGray.800'} flex={'0 0 90px'}>
{label}:
</Box>
<Box borderRadius={'sm'} fontSize={['xs', 'sm']} bg={'myGray.50'} {...props}>
@@ -435,9 +436,50 @@ export const WholeResponseContent = ({
value={activeModule?.textOutput}
/>
{/* code */}
<Row label={workflowT('response.Custom outputs')} value={activeModule?.customOutputs} />
<Row label={workflowT('response.Custom inputs')} value={activeModule?.customInputs} />
<Row label={workflowT('response.Code log')} value={activeModule?.codeLog} />
<>
<Row
label={t('workflow:response.Custom outputs')}
value={activeModule?.customOutputs}
/>
<Row label={t('workflow:response.Custom inputs')} value={activeModule?.customInputs} />
<Row label={t('workflow:response.Code log')} value={activeModule?.codeLog} />
</>

{/* read files */}
<>
{activeModule?.readFiles && activeModule?.readFiles.length > 0 && (
<Row
label={t('workflow:response.read files')}
rawDom={
<Flex flexWrap={'wrap'} gap={3} px={4} py={2}>
{activeModule?.readFiles.map((file, i) => (
<HStack
key={i}
bg={'white'}
boxShadow={'base'}
borderRadius={'sm'}
py={1}
px={2}
{...(file.url
? {
cursor: 'pointer',
onClick: () => window.open(file.url)
}
: {})}
>
<MyIcon name={getFileIcon(file.name) as any} w={'1rem'} />
<Box>{file.name}</Box>
</HStack>
))}
</Flex>
}
/>
)}
<Row
label={t('workflow:response.Read file result')}
value={activeModule?.readFilesResult}
/>
</>
</Box>
)}
</>

@@ -1,3 +1,4 @@
import { AppSchema } from '@fastgpt/global/core/app/type';
import { ChatHistoryItemResType } from '@fastgpt/global/core/chat/type';
import { RuntimeNodeItemType } from '@fastgpt/global/core/workflow/runtime/type';
import { StoreNodeItemType } from '@fastgpt/global/core/workflow/type';

@@ -38,7 +38,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
})();

res.setHeader('Content-Type', `${file.contentType}; charset=${encoding}`);
res.setHeader('Cache-Control', 'public, max-age=3600');
res.setHeader('Cache-Control', 'public, max-age=31536000');
res.setHeader('Content-Disposition', `inline; filename="${encodeURIComponent(file.filename)}"`);

stream.pipe(res);
@@ -1,12 +1,14 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { uploadFile } from '@fastgpt/service/common/file/gridfs/controller';
import { getUploadModel } from '@fastgpt/service/common/file/multer';
import { removeFilesByPaths } from '@fastgpt/service/common/file/utils';
import { NextAPI } from '@/service/middleware/entry';
import { createFileToken } from '@fastgpt/service/support/permission/controller';
import { ReadFileBaseUrl } from '@fastgpt/global/common/file/constants';

export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
/* Creates the multer uploader */
const upload = getUploadModel({
maxSize: (global.feConfigs?.uploadFileMaxSize || 500) * 1024 * 1024
@@ -14,11 +16,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
const filePaths: string[] = [];

try {
await connectToDatabase();
const { file, bucketName, metadata } = await upload.doUpload(req, res);

filePaths.push(file.path);

const { teamId, tmbId } = await authCert({ req, authToken: true });

if (!bucketName) {
@@ -35,8 +34,21 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
metadata: metadata
});

jsonRes(res, {
data: fileId
jsonRes<{
fileId: string;
previewUrl: string;
}>(res, {
data: {
fileId,
previewUrl: `${ReadFileBaseUrl}?filename=${file.originalname}&token=${await createFileToken(
{
bucketName,
teamId,
tmbId,
fileId
}
)}`
}
});
} catch (error) {
jsonRes(res, {
@@ -48,6 +60,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
removeFilesByPaths(filePaths);
}

export default NextAPI(handler);

export const config = {
api: {
bodyParser: false

@@ -15,6 +15,7 @@ import {
import { findAppAndAllChildren } from '@fastgpt/service/core/app/controller';
import { MongoResourcePermission } from '@fastgpt/service/support/permission/schema';
import { ClientSession } from '@fastgpt/service/common/mongo';
import { deleteChatFiles } from '@fastgpt/service/core/chat/controller';

async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
const { appId } = req.query as { appId: string };
@@ -53,6 +54,7 @@ export const onDelOneApp = async ({
for await (const app of apps) {
const appId = app._id;
// Chats
await deleteChatFiles({ appId });
await MongoChatItem.deleteMany(
{
appId

@@ -21,6 +21,7 @@ import {
import { NextAPI } from '@/service/middleware/entry';
import { GPTMessages2Chats } from '@fastgpt/global/core/chat/adapt';
import { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type';
import { AppChatConfigType } from '@fastgpt/global/core/app/type';

export type Props = {
messages: ChatCompletionMessageParam[];
@@ -29,6 +30,7 @@ export type Props = {
variables: Record<string, any>;
appId: string;
appName: string;
chatConfig: AppChatConfigType;
};

async function handler(req: NextApiRequest, res: NextApiResponse) {
@@ -40,7 +42,15 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
res.end();
});

let { nodes = [], edges = [], messages = [], variables = {}, appName, appId } = req.body as Props;
let {
nodes = [],
edges = [],
messages = [],
variables = {},
appName,
appId,
chatConfig
} = req.body as Props;
try {
// [histories, user]
const chatMessages = GPTMessages2Chats(messages);
@@ -79,6 +89,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
/* start process */
const { flowResponses, flowUsages } = await dispatchWorkFlow({
res,
requestOrigin: req.headers.origin,
mode: 'test',
teamId,
tmbId,
@@ -88,6 +99,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
runtimeEdges: edges,
variables,
query: removeEmptyUserInput(userInput),
chatConfig,
histories: chatMessages,
stream: true,
detail: true,
@@ -1,6 +1,5 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { MongoChat } from '@fastgpt/service/core/chat/chatSchema';
import { MongoChatItem } from '@fastgpt/service/core/chat/chatItemSchema';
@@ -8,64 +7,71 @@ import { ClearHistoriesProps } from '@/global/core/chat/api';
import { authOutLink } from '@/service/support/permission/auth/outLink';
import { ChatSourceEnum } from '@fastgpt/global/core/chat/constants';
import { authTeamSpaceToken } from '@/service/support/permission/auth/team';
import { NextAPI } from '@/service/middleware/entry';
import { deleteChatFiles } from '@fastgpt/service/core/chat/controller';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';

/* clear chat history */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
const { appId, shareId, outLinkUid, teamId, teamToken } = req.query as ClearHistoriesProps;
async function handler(req: NextApiRequest, res: NextApiResponse) {
const { appId, shareId, outLinkUid, teamId, teamToken } = req.query as ClearHistoriesProps;

let chatAppId = appId;
let chatAppId = appId!;

const match = await (async () => {
if (shareId && outLinkUid) {
const { appId, uid } = await authOutLink({ shareId, outLinkUid });
const match = await (async () => {
if (shareId && outLinkUid) {
const { appId, uid } = await authOutLink({ shareId, outLinkUid });

chatAppId = appId;
return {
shareId,
outLinkUid: uid
};
}
if (teamId && teamToken) {
const { uid } = await authTeamSpaceToken({ teamId, teamToken });
return {
teamId,
appId,
outLinkUid: uid
};
}
if (appId) {
const { tmbId } = await authCert({ req, authToken: true });
chatAppId = appId;
return {
shareId,
outLinkUid: uid
};
}
if (teamId && teamToken) {
const { uid } = await authTeamSpaceToken({ teamId, teamToken });
return {
teamId,
appId,
outLinkUid: uid
};
}
if (appId) {
const { tmbId } = await authCert({ req, authToken: true });

return {
tmbId,
appId,
source: ChatSourceEnum.online
};
}
return {
tmbId,
appId,
source: ChatSourceEnum.online
};
}

return Promise.reject('Param are error');
})();
return Promise.reject('Param are error');
})();

// find chatIds
const list = await MongoChat.find(match, 'chatId').lean();
const idList = list.map((item) => item.chatId);
// find chatIds
const list = await MongoChat.find(match, 'chatId').lean();
const idList = list.map((item) => item.chatId);

await MongoChatItem.deleteMany({
appId: chatAppId,
chatId: { $in: idList }
});
await MongoChat.deleteMany({
appId: chatAppId,
chatId: { $in: idList }
});
await deleteChatFiles({ chatIdList: idList });

jsonRes(res);
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
await mongoSessionRun(async (session) => {
await MongoChatItem.deleteMany(
{
appId: chatAppId,
chatId: { $in: idList }
},
{ session }
);
await MongoChat.deleteMany(
{
appId: chatAppId,
chatId: { $in: idList }
},
{ session }
);
});

jsonRes(res);
}

export default NextAPI(handler);

@@ -1,4 +1,4 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import type { NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { MongoChat } from '@fastgpt/service/core/chat/chatSchema';
import { MongoChatItem } from '@fastgpt/service/core/chat/chatItemSchema';
@@ -8,6 +8,7 @@ import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
import { NextAPI } from '@/service/middleware/entry';
import { ApiRequestProps } from '@fastgpt/service/type/next';
import { WritePermissionVal } from '@fastgpt/global/support/permission/constant';
import { deleteChatFiles } from '@fastgpt/service/core/chat/controller';

/* clear chat history */
async function handler(req: ApiRequestProps<{}, DelHistoryProps>, res: NextApiResponse) {
@@ -20,6 +21,7 @@ async function handler(req: ApiRequestProps<{}, DelHistoryProps>, res: NextApiRe
per: WritePermissionVal
});

await deleteChatFiles({ chatIdList: [chatId] });
await mongoSessionRun(async (session) => {
await MongoChatItem.deleteMany(
{
@@ -28,7 +30,7 @@ async function handler(req: ApiRequestProps<{}, DelHistoryProps>, res: NextApiRe
},
{ session }
);
await MongoChat.findOneAndRemove(
await MongoChat.deleteOne(
{
appId,
chatId
@@ -41,6 +41,7 @@ async function handler(
/* start process */
const { flowUsages, flowResponses, debugResponse } = await dispatchWorkFlow({
res,
requestOrigin: req.headers.origin,
mode: 'debug',
teamId,
tmbId,
@@ -50,6 +51,7 @@ async function handler(
runtimeEdges: edges,
variables,
query: [],
chatConfig: defaultApp.chatConfig,
histories: [],
stream: false,
detail: true,

@@ -249,6 +249,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
if (app.version === 'v2') {
return dispatchWorkFlow({
res,
requestOrigin: req.headers.origin,
mode: 'chat',
user,
teamId: String(teamId),
@@ -260,6 +261,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
runtimeEdges: initWorkflowEdgeStatus(edges),
variables: runtimeVariables,
query: removeEmptyUserInput(userQuestion.value),
chatConfig,
histories: newHistories,
stream,
detail,

@@ -27,9 +27,10 @@ const ChatTest = ({ appForm }: { appForm: AppSimpleEditFormType }) => {
});

useEffect(() => {
const { nodes, edges } = form2AppWorkflow(appForm);
const { nodes, edges } = form2AppWorkflow(appForm, t);
// console.log(form2AppWorkflow(appForm, t));
setWorkflowData({ nodes, edges });
}, [appForm, setWorkflowData, allDatasets]);
}, [appForm, setWorkflowData, allDatasets, t]);

const { restartChat, ChatContainer } = useChatTest({
...workflowData,
@@ -47,6 +47,7 @@ const ScheduledTriggerConfig = dynamic(
() => import('@/components/core/app/ScheduledTriggerConfig')
);
const WelcomeTextConfig = dynamic(() => import('@/components/core/app/WelcomeTextConfig'));
const FileSelectConfig = dynamic(() => import('@/components/core/app/FileSelect'));

const BoxStyles: BoxProps = {
px: [4, 6],
@@ -120,11 +121,11 @@ const EditForm = ({
[appForm.chatConfig.variables, t]
);

const selectedModel =
llmModelList.find((item) => item.model === appForm.aiSettings.model) ?? llmModelList[0];
const tokenLimit = useMemo(() => {
return (
llmModelList.find((item) => item.model === appForm.aiSettings.model)?.quoteMaxToken || 3000
);
}, [llmModelList, appForm.aiSettings.model]);
return selectedModel.quoteMaxToken || 3000;
}, [selectedModel.quoteMaxToken]);

return (
<>
@@ -338,6 +339,23 @@ const EditForm = ({
</Grid>
</Box>

{/* File select */}
<Box {...BoxStyles}>
<FileSelectConfig
forbidVision={!selectedModel.vision}
value={appForm.chatConfig.fileSelectConfig}
onChange={(e) => {
setAppForm((state) => ({
...state,
chatConfig: {
...state.chatConfig,
fileSelectConfig: e
}
}));
}}
/>
</Box>

{/* variable */}
<Box {...BoxStyles}>
<VariableEdit

@@ -12,7 +12,6 @@ import PopoverConfirm from '@fastgpt/web/components/common/MyPopover/PopoverConf
import { AppSimpleEditFormType } from '@fastgpt/global/core/app/type';
import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
import { form2AppWorkflow } from '@/web/core/app/utils';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { TabEnum } from '../context';
import PublishHistoriesSlider, { type InitProps } from '../PublishHistoriesSlider';
import { appWorkflow2Form } from '@fastgpt/global/core/app/utils';
@@ -52,7 +51,7 @@ const Header = ({
);

const isPublished = useMemo(() => {
const data = form2AppWorkflow(appForm);
const data = form2AppWorkflow(appForm, t);

return compareWorkflow(
{
@@ -66,11 +65,11 @@ const Header = ({
chatConfig: data.chatConfig
}
);
}, [appDetail.chatConfig, appDetail.modules, appForm]);
}, [appDetail.chatConfig, appDetail.modules, appForm, t]);

const onSubmitPublish = useCallback(
async (data: AppSimpleEditFormType) => {
const { nodes, edges } = form2AppWorkflow(data);
const { nodes, edges } = form2AppWorkflow(data, t);
await onPublish({
nodes,
edges,
@@ -78,7 +77,7 @@ const Header = ({
type: AppTypeEnum.simple
});
},
[onPublish]
[onPublish, t]
);

const [historiesDefaultData, setHistoriesDefaultData] = useState<InitProps>();
@@ -119,9 +118,11 @@ const Header = ({
: publishStatusStyle.unPublish.colorSchema
}
>
{isPublished
? publishStatusStyle.published.text
: publishStatusStyle.unPublish.text}
{t(
isPublished
? publishStatusStyle.published.text
: publishStatusStyle.unPublish.text
)}
</MyTag>
)}

@@ -133,7 +134,7 @@ const Header = ({
w={'30px'}
variant={'whitePrimary'}
onClick={() => {
const { nodes, edges } = form2AppWorkflow(appForm);
const { nodes, edges } = form2AppWorkflow(appForm, t);
setHistoriesDefaultData({
nodes,
edges,

@@ -190,9 +190,11 @@ const AppCard = ({ showSaveStatus }: { showSaveStatus: boolean }) => {
: publishStatusStyle.unPublish.colorSchema
}
>
{isPublished
? publishStatusStyle.published.text
: publishStatusStyle.unPublish.text}
{t(
isPublished
? publishStatusStyle.published.text
: publishStatusStyle.unPublish.text
)}
</MyTag>
</Flex>
</MyTooltip>
@@ -36,6 +36,7 @@ const nodeTypes: Record<FlowNodeTypeEnum, any> = {
[FlowNodeTypeEnum.systemConfig]: dynamic(() => import('./nodes/NodeSystemConfig')),
[FlowNodeTypeEnum.workflowStart]: dynamic(() => import('./nodes/NodeWorkflowStart')),
[FlowNodeTypeEnum.chatNode]: NodeSimple,
[FlowNodeTypeEnum.readFiles]: NodeSimple,
[FlowNodeTypeEnum.datasetSearchNode]: NodeSimple,
[FlowNodeTypeEnum.datasetConcatNode]: dynamic(() => import('./nodes/NodeDatasetConcat')),
[FlowNodeTypeEnum.answerNode]: dynamic(() => import('./nodes/NodeAnswer')),

@@ -174,7 +174,7 @@ function Reference({
<>
<Flex alignItems={'center'} mb={1}>
<FormLabel required={input.required}>{input.label}</FormLabel>
{input.description && <QuestionTip label={input.description}></QuestionTip>}
{input.description && <QuestionTip ml={0.5} label={input.description}></QuestionTip>}
{/* value */}
<ValueTypeLabel valueType={input.valueType} />
@@ -1,6 +1,6 @@
|
||||
import React, { Dispatch, useMemo, useTransition } from 'react';
|
||||
import { NodeProps } from 'reactflow';
|
||||
import { Box, useTheme } from '@chakra-ui/react';
|
||||
import { Box } from '@chakra-ui/react';
|
||||
import { FlowNodeItemType } from '@fastgpt/global/core/workflow/type/node.d';
|
||||
|
||||
import QGSwitch from '@/components/core/app/QGSwitch';
|
||||
@@ -19,6 +19,7 @@ import { useMemoizedFn } from 'ahooks';
|
||||
import VariableEdit from '@/components/core/app/VariableEdit';
|
||||
import { AppContext } from '@/pages/app/detail/components/context';
|
||||
import WelcomeTextConfig from '@/components/core/app/WelcomeTextConfig';
|
||||
import FileSelect from '@/components/core/app/FileSelect';
|
||||
|
||||
type ComponentProps = {
|
||||
chatConfig: AppChatConfigType;
|
||||
@@ -26,7 +27,6 @@ type ComponentProps = {
|
||||
};
|
||||
|
||||
const NodeUserGuide = ({ data, selected }: NodeProps<FlowNodeItemType>) => {
|
||||
const theme = useTheme();
|
||||
const { appDetail, setAppDetail } = useContextSelector(AppContext, (v) => v);
|
||||
|
||||
const chatConfig = useMemo<AppChatConfigType>(() => {
|
||||
@@ -63,19 +63,22 @@ const NodeUserGuide = ({ data, selected }: NodeProps<FlowNodeItemType>) => {
|
||||
<Box pt={4}>
|
||||
<ChatStartVariable {...componentsProps} />
|
||||
</Box>
|
||||
<Box mt={3} pt={3} borderTop={theme.borders.base}>
|
||||
<Box mt={3} pt={3} borderTop={'base'}>
|
||||
<FileSelectConfig {...componentsProps} />
|
||||
</Box>
|
||||
<Box mt={3} pt={3} borderTop={'base'}>
|
||||
<TTSGuide {...componentsProps} />
|
||||
</Box>
|
||||
<Box mt={3} pt={3} borderTop={theme.borders.base}>
|
||||
<Box mt={3} pt={3} borderTop={'base'}>
|
||||
<WhisperGuide {...componentsProps} />
|
||||
</Box>
|
||||
<Box mt={3} pt={3} borderTop={theme.borders.base}>
|
||||
<Box mt={3} pt={3} borderTop={'base'}>
|
||||
<QuestionGuide {...componentsProps} />
|
||||
</Box>
|
||||
<Box mt={3} pt={3} borderTop={theme.borders.base}>
|
||||
<Box mt={3} pt={3} borderTop={'base'}>
|
||||
<ScheduledTrigger {...componentsProps} />
|
||||
</Box>
|
||||
<Box mt={3} pt={3} borderTop={theme.borders.base}>
|
||||
<Box mt={3} pt={3} borderTop={'base'}>
|
||||
<QuestionInputGuide {...componentsProps} />
|
||||
</Box>
|
||||
</Box>
|
||||
@@ -219,3 +222,20 @@ function QuestionInputGuide({ chatConfig: { chatInputGuide }, setAppDetail }: Co
|
||||
/>
|
||||
) : null;
|
||||
}
|
||||
|
||||
function FileSelectConfig({ chatConfig: { fileSelectConfig }, setAppDetail }: ComponentProps) {
|
||||
return (
|
||||
<FileSelect
|
||||
value={fileSelectConfig}
|
||||
onChange={(e) => {
|
||||
setAppDetail((state) => ({
|
||||
...state,
|
||||
chatConfig: {
|
||||
...state.chatConfig,
|
||||
fileSelectConfig: e
|
||||
}
|
||||
}));
|
||||
}}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
@@ -1,4 +1,4 @@
|
||||
import React, { useMemo } from 'react';
|
||||
import React, { useEffect, useMemo } from 'react';
|
||||
import { NodeProps } from 'reactflow';
|
||||
import NodeCard from './render/NodeCard';
|
||||
import { FlowNodeItemType } from '@fastgpt/global/core/workflow/type/node.d';
|
||||
@@ -14,11 +14,13 @@ import { FlowNodeOutputItemType } from '@fastgpt/global/core/workflow/type/io';
|
||||
import { FlowNodeOutputTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
|
||||
import { WorkflowIOValueTypeEnum } from '@fastgpt/global/core/workflow/constants';
|
||||
import { AppContext } from '@/pages/app/detail/components/context';
|
||||
import { userFilesInput } from '@fastgpt/global/core/workflow/template/system/workflowStart';
|
||||
|
||||
const NodeStart = ({ data, selected }: NodeProps<FlowNodeItemType>) => {
|
||||
const { t } = useTranslation();
|
||||
const { nodeId, outputs } = data;
|
||||
const nodeList = useContextSelector(WorkflowContext, (v) => v.nodeList);
|
||||
const onChangeNode = useContextSelector(WorkflowContext, (v) => v.onChangeNode);
|
||||
const { appDetail } = useContextSelector(AppContext, (v) => v);
|
||||
|
||||
const variablesOutputs = useCreation(() => {
|
||||
@@ -38,6 +40,30 @@ const NodeStart = ({ data, selected }: NodeProps<FlowNodeItemType>) => {
|
||||
}));
|
||||
}, [nodeList, t]);
|
||||
|
||||
// Dynamic add or delete userFilesInput
|
||||
useEffect(() => {
|
||||
const canUploadFiles =
|
||||
appDetail.chatConfig?.fileSelectConfig?.canSelectFile ||
|
||||
appDetail.chatConfig?.fileSelectConfig?.canSelectImg;
|
||||
const repeatKey = outputs.find((item) => item.key === userFilesInput.key);
|
||||
|
||||
if (canUploadFiles) {
|
||||
!repeatKey &&
|
||||
onChangeNode({
|
||||
nodeId,
|
||||
type: 'addOutput',
|
||||
value: userFilesInput
|
||||
});
|
||||
} else {
|
||||
repeatKey &&
|
||||
onChangeNode({
|
||||
nodeId,
|
||||
type: 'delOutput',
|
||||
key: userFilesInput.key
|
||||
});
|
||||
}
|
||||
}, [appDetail.chatConfig?.fileSelectConfig, nodeId, onChangeNode, outputs]);
|
||||
|
||||
return (
|
||||
<NodeCard
|
||||
minW={'240px'}
|
||||
|
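For context on the hunk above: the new effect keeps the workflow-start node's outputs aligned with the app's fileSelectConfig, adding the userFiles output when uploads are enabled and removing it otherwise. A minimal TypeScript sketch of that sync pattern, with a hypothetical output object standing in for userFilesInput (the onChangeNode payload shapes are taken from the diff; everything else here is assumed):

```ts
// Sketch only: conditional add/remove of a node output, mirroring the useEffect above.
type OnChangeNode = (e: {
  nodeId: string;
  type: 'addOutput' | 'delOutput';
  value?: unknown;
  key?: string;
}) => void;

const fileOutput = { id: 'userFiles', key: 'userFiles', label: 'Files' }; // hypothetical output definition

function syncFileOutput(
  enabled: boolean,
  nodeId: string,
  outputs: { key: string }[],
  onChangeNode: OnChangeNode
) {
  const existing = outputs.find((o) => o.key === fileOutput.key);
  if (enabled && !existing) {
    // config allows uploads but the output is missing: add it
    onChangeNode({ nodeId, type: 'addOutput', value: fileOutput });
  } else if (!enabled && existing) {
    // uploads disabled: drop the stale output
    onChangeNode({ nodeId, type: 'delOutput', key: fileOutput.key });
  }
}
```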
@@ -84,6 +84,8 @@ const InputLabel = ({ nodeId, input }: Props) => {
);
}, [
description,
input.renderTypeList,
input.selectedTypeIndex,
label,
onChangeRenderType,
renderTypeList,

@@ -36,9 +36,10 @@ const SelectAiModelRender = ({ item, inputs = [], nodeId }: RenderInputProps) =>
inputs.find((input) => input.key === NodeInputKeyEnum.aiChatMaxToken)?.value ?? 2048,
temperature:
inputs.find((input) => input.key === NodeInputKeyEnum.aiChatTemperature)?.value ?? 1,
isResponseAnswerText: inputs.find(
(input) => input.key === NodeInputKeyEnum.aiChatIsResponseText
)?.value
isResponseAnswerText:
inputs.find((input) => input.key === NodeInputKeyEnum.aiChatIsResponseText)?.value ?? true,
aiChatVision:
inputs.find((input) => input.key === NodeInputKeyEnum.aiChatVision)?.value ?? true
}),
[inputs]
);

@@ -35,7 +35,7 @@ const OutputLabel = ({ nodeId, output }: { nodeId: string; output: FlowNodeOutpu
>
{t(label as any)}
</Box>
{description && <QuestionTip label={t(description as any)} />}
{description && <QuestionTip ml={1} label={t(description as any)} />}
<ValueTypeLabel valueType={valueType} />
</Flex>
{output.type === FlowNodeOutputTypeEnum.source && (

@@ -523,7 +523,7 @@ const WorkflowContextProvider = ({
version: 'v2'
});
setSaveLabel(
t('core.app.Saved time', {
t('common:core.app.Saved time', {
time: formatTime2HM()
})
);

@@ -51,7 +51,8 @@ export const useChatTest = ({
edges: initWorkflowEdgeStatus(edges),
variables,
appId: appDetail._id,
appName: `调试-${appDetail.name}`
appName: `调试-${appDetail.name}`,
chatConfig
},
onMessage: generatingMessage,
abortCtrl: controller
@@ -99,7 +100,6 @@ export const useChatTest = ({
userAvatar={userInfo?.avatar}
showMarkIcon
chatConfig={chatConfig}
showFileSelector={checkChatSupportSelectFileByModules(nodes)}
onStartChat={startChat}
onDelMessage={() => {}}
/>

@@ -255,7 +255,6 @@ const Chat = ({
appAvatar={chatData.app.avatar}
userAvatar={userInfo?.avatar}
chatConfig={chatData.app?.chatConfig}
showFileSelector={checkChatSupportSelectFileByChatModels(chatData.app.chatModels)}
feedbackType={'user'}
onStartChat={onStartChat}
onDelMessage={({ contentId }) => delChatRecordById({ contentId, appId, chatId })}
@@ -339,7 +338,7 @@ export async function getServerSideProps(context: any) {
props: {
appId: context?.query?.appId || '',
chatId: context?.query?.chatId || '',
...(await serviceSideProps(context, ['file', 'app']))
...(await serviceSideProps(context, ['file', 'app', 'chat']))
}
};
}

@@ -318,7 +318,6 @@ const OutLink = ({ appName, appIntro, appAvatar }: Props) => {
appAvatar={chatData.app.avatar}
userAvatar={chatData.userAvatar}
chatConfig={chatData.app?.chatConfig}
showFileSelector={checkChatSupportSelectFileByChatModels(chatData.app.chatModels)}
feedbackType={'user'}
onStartChat={startChat}
onDelMessage={({ contentId }) =>
@@ -395,7 +394,7 @@ export async function getServerSideProps(context: any) {
appIntro: app?.appId?.intro ?? 'intro',
shareId: shareId ?? '',
authToken: authToken ?? '',
...(await serviceSideProps(context, ['file', 'app']))
...(await serviceSideProps(context, ['file', 'app', 'chat']))
}
};
}

@@ -252,7 +252,6 @@ const Chat = ({ myApps }: { myApps: AppListItemType[] }) => {
appAvatar={chatData.app.avatar}
userAvatar={chatData.userAvatar}
chatConfig={chatData.app?.chatConfig}
showFileSelector={checkChatSupportSelectFileByChatModels(chatData.app.chatModels)}
feedbackType={'user'}
onStartChat={startChat}
onDelMessage={({ contentId }) =>
@@ -338,7 +337,7 @@ export async function getServerSideProps(context: any) {
chatId: context?.query?.chatId || '',
teamId: context?.query?.teamId || '',
teamToken: context?.query?.teamToken || '',
...(await serviceSideProps(context, ['file', 'app']))
...(await serviceSideProps(context, ['file', 'app', 'chat']))
}
};
}

@@ -89,7 +89,7 @@ const FileSelector = ({
// upload file
await Promise.all(
files.map(async ({ fileId, file }) => {
const uploadFileId = await uploadFile2DB({
const { fileId: uploadFileId } = await uploadFile2DB({
file,
bucketName: BucketNameEnum.dataset,
percentListen: (e) => {
@@ -230,7 +230,7 @@ const FileSelector = ({
let isErr = files.some((item) => item.type === '');
if (isErr) {
return toast({
title: fileT('upload_error_description'),
title: t('file:upload_error_description'),
status: 'error'
});
}

@@ -54,7 +54,7 @@ const LoginForm = ({ setPageType, loginSuccess }: Props) => {
}
setRequesting(false);
},
[loginSuccess, toast]
[loginSuccess, t, toast]
);

const isCommunityVersion = feConfigs?.show_register === false && !feConfigs?.isPlus;

@@ -129,7 +129,7 @@ const Login = () => {

export async function getServerSideProps(context: any) {
return {
props: { ...(await serviceSideProps(context, ['app'])) }
props: { ...(await serviceSideProps(context, ['app', 'user'])) }
};
}

@@ -1,7 +1,12 @@
import { setCron } from '@fastgpt/service/common/system/cron';
import { startTrainingQueue } from '@/service/core/dataset/training/utils';
import { clearTmpUploadFiles } from '@fastgpt/service/common/file/utils';
import { checkInvalidDatasetFiles, checkInvalidDatasetData, checkInvalidVector } from './cronTask';
import {
checkInvalidDatasetFiles,
checkInvalidDatasetData,
checkInvalidVector,
removeExpiredChatFiles
} from './cronTask';
import { checkTimerLock } from '@fastgpt/service/common/system/timerLock/utils';
import { TimerIdEnum } from '@fastgpt/service/common/system/timerLock/constants';
import { addHours } from 'date-fns';
@@ -28,7 +33,8 @@ const clearInvalidDataCron = () => {
lockMinuted: 59
})
) {
checkInvalidDatasetFiles(addHours(new Date(), -6), addHours(new Date(), -2));
await checkInvalidDatasetFiles(addHours(new Date(), -6), addHours(new Date(), -2));
removeExpiredChatFiles();
}
});

@@ -1,3 +1,4 @@
import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
import {
delFileByFileIdList,
getGFSCollection
@@ -11,15 +12,16 @@ import {
import { MongoDatasetCollection } from '@fastgpt/service/core/dataset/collection/schema';
import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/schema';
import { addDays } from 'date-fns';

/*
check dataset.files data. If there is no match in dataset.collections, delete it
可能异常情况
可能异常情况:
1. 上传了文件,未成功创建集合
*/
export async function checkInvalidDatasetFiles(start: Date, end: Date) {
let deleteFileAmount = 0;
const collection = getGFSCollection('dataset');
const collection = getGFSCollection(BucketNameEnum.dataset);
const where = {
uploadDate: { $gte: start, $lte: end }
};
@@ -46,7 +48,10 @@ export async function checkInvalidDatasetFiles(start: Date, end: Date) {

// 3. if not found, delete file
if (hasCollection === 0) {
await delFileByFileIdList({ bucketName: 'dataset', fileIdList: [String(file._id)] });
await delFileByFileIdList({
bucketName: BucketNameEnum.dataset,
fileIdList: [String(file._id)]
});
console.log('delete file', file._id);
deleteFileAmount++;
}
@@ -59,6 +64,35 @@ export async function checkInvalidDatasetFiles(start: Date, end: Date) {
addLog.info(`Clear invalid dataset files finish, remove ${deleteFileAmount} files`);
}

/*
Remove 7 days ago chat files
*/
export const removeExpiredChatFiles = async () => {
let deleteFileAmount = 0;
const collection = getGFSCollection(BucketNameEnum.chat);
const where = {
uploadDate: { $lte: addDays(new Date(), -7) }
};

// get all file _id
const files = await collection.find(where, { projection: { _id: 1 } }).toArray();

// Delete file one by one
for await (const file of files) {
try {
await delFileByFileIdList({
bucketName: BucketNameEnum.chat,
fileIdList: [String(file._id)]
});
deleteFileAmount++;
} catch (error) {
console.log(error);
}
}

addLog.info(`Remove expired chat files finish, remove ${deleteFileAmount} files`);
};

/*
检测无效的 Mongo 数据
异常情况:
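The retention rule in removeExpiredChatFiles above boils down to a single date comparison: a chat file becomes eligible for deletion once its uploadDate is at or before now minus seven days. A small sketch of that predicate (the helper name is an assumption, not part of the diff):

```ts
import { addDays } from 'date-fns';

// Hypothetical predicate matching the `uploadDate: { $lte: addDays(new Date(), -7) }` filter above.
const isExpiredChatFile = (uploadDate: Date, now: Date = new Date()): boolean =>
  uploadDate.getTime() <= addDays(now, -7).getTime();

// A file uploaded 8 days ago is expired; one uploaded yesterday is kept.
```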
@@ -1,4 +1,5 @@
import { getUserChatInfoAndAuthTeamPoints } from '@/service/support/permission/auth/team';
import { defaultApp } from '@/web/core/app/constants';
import { getNextTimeByCronStringAndTimezone } from '@fastgpt/global/common/string/time';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { delay } from '@fastgpt/global/common/system/utils';
@@ -46,6 +47,7 @@ export const getScheduleTriggerApp = async () => {
}
}
],
chatConfig: defaultApp.chatConfig,
histories: [],
stream: false,
detail: false,

@@ -14,6 +14,7 @@ import { checkInvalidChunkAndLock } from '@fastgpt/service/core/dataset/training
import { addMinutes } from 'date-fns';
import { countGptMessagesTokens } from '@fastgpt/service/common/string/tiktoken/index';
import { pushDataListToTrainingQueueByCollectionId } from '@fastgpt/service/core/dataset/training/controller';
import { loadRequestMessages } from '@fastgpt/service/core/chat/utils';

const reduceQueue = () => {
global.qaQueueLen = global.qaQueueLen > 0 ? global.qaQueueLen - 1 : 0;
@@ -113,7 +114,7 @@ ${replaceVariable(Prompt_AgentQA.fixedText, { text })}`;
const chatResponse = await ai.chat.completions.create({
model,
temperature: 0.3,
messages,
messages: await loadRequestMessages({ messages, useVision: false }),
stream: false
});
const answer = chatResponse.choices?.[0].message?.content || '';

@@ -9,7 +9,10 @@ export const postUploadFiles = (
data: FormData,
onUploadProgress: (progressEvent: AxiosProgressEvent) => void
) =>
POST<string>('/common/file/upload', data, {
POST<{
fileId: string;
previewUrl: string;
}>('/common/file/upload', data, {
timeout: 600000,
onUploadProgress,
headers: {
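With the change above, postUploadFiles resolves to an object instead of a bare file id string. A hedged caller sketch; the FormData contents and the progress handling are illustrative only and not taken from the diff:

```ts
// Sketch only: consuming the new { fileId, previewUrl } return shape of postUploadFiles.
async function uploadAndPreview(file: File) {
  const formData = new FormData();
  formData.append('file', file); // assumed field name for illustration

  const { fileId, previewUrl } = await postUploadFiles(formData, (progressEvent) => {
    console.log('upload progress', progressEvent.loaded, progressEvent.total);
  });

  return { fileId, previewUrl };
}
```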
@@ -7,7 +7,6 @@ import {
import { StoreNodeItemType } from '@fastgpt/global/core/workflow/type/node.d';
import {
FlowNodeInputTypeEnum,
FlowNodeOutputTypeEnum,
FlowNodeTypeEnum
} from '@fastgpt/global/core/workflow/node/constant';
import { NodeInputKeyEnum, WorkflowIOValueTypeEnum } from '@fastgpt/global/core/workflow/constants';
@@ -18,32 +17,45 @@ import { EditorVariablePickerType } from '@fastgpt/web/components/common/Textare
import { TFunction } from 'next-i18next';
import { ToolModule } from '@fastgpt/global/core/workflow/template/system/tools';
import { useDatasetStore } from '../dataset/store/dataset';
import {
WorkflowStart,
userFilesInput
} from '@fastgpt/global/core/workflow/template/system/workflowStart';
import { SystemConfigNode } from '@fastgpt/global/core/workflow/template/system/systemConfig';
import { AiChatModule } from '@fastgpt/global/core/workflow/template/system/aiChat';
import { DatasetSearchModule } from '@fastgpt/global/core/workflow/template/system/datasetSearch';
import { ReadFilesNodes } from '@fastgpt/global/core/workflow/template/system/readFiles';

type WorkflowType = {
nodes: StoreNodeItemType[];
edges: StoreEdgeItemType[];
};
export function form2AppWorkflow(data: AppSimpleEditFormType): WorkflowType & {
export function form2AppWorkflow(
data: AppSimpleEditFormType,
t: any // i18nT
): WorkflowType & {
chatConfig: AppChatConfigType;
} {
const workflowStartNodeId = 'workflowStartNodeId';
const datasetNodeId = 'iKBoX2vIzETU';
const aiChatNodeId = '7BdojPlukIQw';

const allDatasets = useDatasetStore.getState().allDatasets;
const selectedDatasets = data.dataset.datasets.filter((item) =>
allDatasets.some((ds) => ds._id === item.datasetId)
);

function systemConfigTemplate(formData: AppSimpleEditFormType): StoreNodeItemType {
function systemConfigTemplate(): StoreNodeItemType {
return {
nodeId: 'userGuide',
name: '系统配置',
intro: '可以配置应用的系统参数',
flowNodeType: FlowNodeTypeEnum.systemConfig,
nodeId: SystemConfigNode.id,
name: t(SystemConfigNode.name),
intro: '',
flowNodeType: SystemConfigNode.flowNodeType,
position: {
x: 531.2422736065552,
y: -486.7611729549753
},
version: '481',
version: SystemConfigNode.version,
inputs: [],
outputs: []
};
@@ -51,509 +63,259 @@ export function form2AppWorkflow(data: AppSimpleEditFormType): WorkflowType & {
function workflowStartTemplate(): StoreNodeItemType {
return {
nodeId: workflowStartNodeId,
name: '流程开始',
name: t(WorkflowStart.name),
intro: '',
avatar: '/imgs/workflow/userChatInput.svg',
flowNodeType: FlowNodeTypeEnum.workflowStart,
avatar: WorkflowStart.avatar,
flowNodeType: WorkflowStart.flowNodeType,
position: {
x: 558.4082376415505,
y: 123.72387429194112
},
version: '481',
version: WorkflowStart.version,
inputs: WorkflowStart.inputs,
outputs: [...WorkflowStart.outputs, userFilesInput]
};
}
function aiChatTemplate(formData: AppSimpleEditFormType): StoreNodeItemType {
return {
nodeId: aiChatNodeId,
name: t(AiChatModule.name),
intro: t(AiChatModule.intro),
avatar: AiChatModule.avatar,
flowNodeType: AiChatModule.flowNodeType,
showStatus: true,
position: {
x: 1106.3238387960757,
y: -350.6030674683474
},
version: AiChatModule.version,
inputs: [
{
key: 'model',
renderTypeList: [FlowNodeInputTypeEnum.settingLLMModel, FlowNodeInputTypeEnum.reference],
label: '',
valueType: WorkflowIOValueTypeEnum.string,
value: formData.aiSettings.model
},
{
key: 'temperature',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
value: formData.aiSettings.temperature,
valueType: WorkflowIOValueTypeEnum.number,
min: 0,
max: 10,
step: 1
},
{
key: 'maxToken',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
value: formData.aiSettings.maxToken,
valueType: WorkflowIOValueTypeEnum.number,
min: 100,
max: 4000,
step: 50
},
{
key: 'isResponseAnswerText',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
value: true,
valueType: WorkflowIOValueTypeEnum.boolean
},
{
key: 'quoteTemplate',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
valueType: WorkflowIOValueTypeEnum.string
},
{
key: 'quotePrompt',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
valueType: WorkflowIOValueTypeEnum.string
},
{
key: 'systemPrompt',
renderTypeList: [FlowNodeInputTypeEnum.textarea, FlowNodeInputTypeEnum.reference],
max: 3000,
valueType: WorkflowIOValueTypeEnum.string,
label: 'core.ai.Prompt',
description: 'core.app.tip.chatNodeSystemPromptTip',
placeholder: 'core.app.tip.chatNodeSystemPromptTip',
value: formData.aiSettings.systemPrompt
},
{
key: 'history',
renderTypeList: [FlowNodeInputTypeEnum.numberInput, FlowNodeInputTypeEnum.reference],
valueType: WorkflowIOValueTypeEnum.chatHistory,
label: 'core.module.input.label.chat history',
required: true,
min: 0,
max: 30,
value: formData.aiSettings.maxHistories
},
{
key: 'userChatInput',
renderTypeList: [FlowNodeInputTypeEnum.reference, FlowNodeInputTypeEnum.textarea],
valueType: WorkflowIOValueTypeEnum.string,
label: '用户问题',
required: true,
toolDescription: '用户问题'
toolDescription: '用户问题',
value: [workflowStartNodeId, 'userChatInput']
},
{
key: 'quoteQA',
renderTypeList: [FlowNodeInputTypeEnum.settingDatasetQuotePrompt],
label: '',
debugLabel: '知识库引用',
description: '',
valueType: WorkflowIOValueTypeEnum.datasetQuote,
value: selectedDatasets ? [datasetNodeId, 'quoteQA'] : undefined
},
{
key: NodeInputKeyEnum.aiChatVision,
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
valueType: WorkflowIOValueTypeEnum.boolean,
value: true
}
],
outputs: [
outputs: AiChatModule.outputs
};
}
function datasetNodeTemplate(formData: AppSimpleEditFormType, question: any): StoreNodeItemType {
return {
nodeId: datasetNodeId,
name: t(DatasetSearchModule.name),
intro: t(DatasetSearchModule.intro),
avatar: DatasetSearchModule.avatar,
flowNodeType: DatasetSearchModule.flowNodeType,
showStatus: true,
position: {
x: 918.5901682164496,
y: -227.11542247619582
},
version: '481',
inputs: [
{
id: 'userChatInput',
key: 'userChatInput',
label: 'core.module.input.label.user question',
key: 'datasets',
renderTypeList: [FlowNodeInputTypeEnum.selectDataset, FlowNodeInputTypeEnum.reference],
label: 'core.module.input.label.Select dataset',
value: selectedDatasets,
valueType: WorkflowIOValueTypeEnum.selectDataset,
list: [],
required: true
},
{
key: 'similarity',
renderTypeList: [FlowNodeInputTypeEnum.selectDatasetParamsModal],
label: '',
value: formData.dataset.similarity,
valueType: WorkflowIOValueTypeEnum.number
},
{
key: 'limit',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
value: formData.dataset.limit,
valueType: WorkflowIOValueTypeEnum.number
},
{
key: 'searchMode',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
valueType: WorkflowIOValueTypeEnum.string,
type: FlowNodeOutputTypeEnum.static
value: formData.dataset.searchMode
},
{
key: 'usingReRank',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
valueType: WorkflowIOValueTypeEnum.boolean,
value: formData.dataset.usingReRank
},
{
key: 'datasetSearchUsingExtensionQuery',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
valueType: WorkflowIOValueTypeEnum.boolean,
value: formData.dataset.datasetSearchUsingExtensionQuery
},
{
key: 'datasetSearchExtensionModel',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
valueType: WorkflowIOValueTypeEnum.string,
value: formData.dataset.datasetSearchExtensionModel
},
{
key: 'datasetSearchExtensionBg',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
valueType: WorkflowIOValueTypeEnum.string,
value: formData.dataset.datasetSearchExtensionBg
},
{
key: 'userChatInput',
renderTypeList: [FlowNodeInputTypeEnum.reference, FlowNodeInputTypeEnum.textarea],
valueType: WorkflowIOValueTypeEnum.string,
label: '用户问题',
required: true,
toolDescription: '需要检索的内容',
value: question
}
]
],
outputs: DatasetSearchModule.outputs
};
}

// Start, AiChat
function simpleChatTemplate(formData: AppSimpleEditFormType): WorkflowType {
return {
nodes: [
{
nodeId: '7BdojPlukIQw',
name: 'AI 对话',
intro: 'AI 大模型对话',
avatar: '/imgs/workflow/AI.png',
flowNodeType: FlowNodeTypeEnum.chatNode,
showStatus: true,
position: {
x: 1106.3238387960757,
y: -350.6030674683474
},
version: '481',
inputs: [
{
key: 'model',
renderTypeList: [
FlowNodeInputTypeEnum.settingLLMModel,
FlowNodeInputTypeEnum.reference
],
label: 'core.module.input.label.aiModel',
valueType: WorkflowIOValueTypeEnum.string,
value: formData.aiSettings.model
},
{
key: 'temperature',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
value: formData.aiSettings.temperature,
valueType: WorkflowIOValueTypeEnum.number,
min: 0,
max: 10,
step: 1
},
{
key: 'maxToken',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
value: formData.aiSettings.maxToken,
valueType: WorkflowIOValueTypeEnum.number,
min: 100,
max: 4000,
step: 50
},
{
key: 'isResponseAnswerText',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
value: true,
valueType: WorkflowIOValueTypeEnum.boolean
},
{
key: 'quoteTemplate',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
valueType: WorkflowIOValueTypeEnum.string
},
{
key: 'quotePrompt',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
valueType: WorkflowIOValueTypeEnum.string
},
{
key: 'systemPrompt',
renderTypeList: [FlowNodeInputTypeEnum.textarea, FlowNodeInputTypeEnum.reference],
max: 3000,
valueType: WorkflowIOValueTypeEnum.string,
label: 'core.ai.Prompt',
description: 'core.app.tip.chatNodeSystemPromptTip',
placeholder: 'core.app.tip.chatNodeSystemPromptTip',
value: formData.aiSettings.systemPrompt
},
{
key: 'history',
renderTypeList: [FlowNodeInputTypeEnum.numberInput, FlowNodeInputTypeEnum.reference],
valueType: WorkflowIOValueTypeEnum.chatHistory,
label: 'core.module.input.label.chat history',
required: true,
min: 0,
max: 30,
value: formData.aiSettings.maxHistories
},
{
key: 'userChatInput',
renderTypeList: [FlowNodeInputTypeEnum.reference, FlowNodeInputTypeEnum.textarea],
valueType: WorkflowIOValueTypeEnum.string,
label: '用户问题',
required: true,
toolDescription: '用户问题',
value: [workflowStartNodeId, 'userChatInput']
},
{
key: 'quoteQA',
renderTypeList: [FlowNodeInputTypeEnum.settingDatasetQuotePrompt],
label: '',
debugLabel: '知识库引用',
description: '',
valueType: WorkflowIOValueTypeEnum.datasetQuote
}
],
outputs: [
{
id: 'history',
key: 'history',
label: 'core.module.output.label.New context',
description: 'core.module.output.description.New context',
valueType: WorkflowIOValueTypeEnum.chatHistory,
type: FlowNodeOutputTypeEnum.static
},
{
id: 'answerText',
key: 'answerText',
label: 'core.module.output.label.Ai response content',
description: 'core.module.output.description.Ai response content',
valueType: WorkflowIOValueTypeEnum.string,
type: FlowNodeOutputTypeEnum.static
}
]
}
],
nodes: [aiChatTemplate(formData)],
edges: [
{
source: workflowStartNodeId,
target: '7BdojPlukIQw',
target: aiChatNodeId,
sourceHandle: `${workflowStartNodeId}-source-right`,
targetHandle: '7BdojPlukIQw-target-left'
targetHandle: `${aiChatNodeId}-target-left`
}
]
};
}
// Start, Dataset search, AiChat
function datasetTemplate(formData: AppSimpleEditFormType): WorkflowType {
return {
nodes: [
{
nodeId: '7BdojPlukIQw',
name: 'AI 对话',
intro: 'AI 大模型对话',
avatar: '/imgs/workflow/AI.png',
flowNodeType: FlowNodeTypeEnum.chatNode,
showStatus: true,
position: {
x: 1638.509551404687,
y: -341.0428450861567
},
version: '481', // [FlowNodeTypeEnum.chatNode]
inputs: [
{
key: 'model',
renderTypeList: [
FlowNodeInputTypeEnum.settingLLMModel,
FlowNodeInputTypeEnum.reference
],
label: 'core.module.input.label.aiModel',
valueType: WorkflowIOValueTypeEnum.string,
value: formData.aiSettings.model
},
{
key: 'temperature',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
value: formData.aiSettings.temperature,
valueType: WorkflowIOValueTypeEnum.number,
min: 0,
max: 10,
step: 1
},
{
key: 'maxToken',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
value: formData.aiSettings.maxToken,
valueType: WorkflowIOValueTypeEnum.number,
min: 100,
max: 4000,
step: 50
},
{
key: 'isResponseAnswerText',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
value: true,
valueType: WorkflowIOValueTypeEnum.boolean
},
{
key: 'quoteTemplate',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
valueType: WorkflowIOValueTypeEnum.string
},
{
key: 'quotePrompt',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
valueType: WorkflowIOValueTypeEnum.string
},
{
key: 'systemPrompt',
renderTypeList: [FlowNodeInputTypeEnum.textarea, FlowNodeInputTypeEnum.reference],
max: 3000,
valueType: WorkflowIOValueTypeEnum.string,
label: 'core.ai.Prompt',
description: 'core.app.tip.chatNodeSystemPromptTip',
placeholder: 'core.app.tip.chatNodeSystemPromptTip',
value: formData.aiSettings.systemPrompt
},
{
key: 'history',
renderTypeList: [FlowNodeInputTypeEnum.numberInput, FlowNodeInputTypeEnum.reference],
valueType: WorkflowIOValueTypeEnum.chatHistory,
label: 'core.module.input.label.chat history',
required: true,
min: 0,
max: 30,
value: formData.aiSettings.maxHistories
},
{
key: 'userChatInput',
renderTypeList: [FlowNodeInputTypeEnum.reference, FlowNodeInputTypeEnum.textarea],
valueType: WorkflowIOValueTypeEnum.string,
label: '用户问题',
required: true,
toolDescription: '用户问题',
value: [workflowStartNodeId, 'userChatInput']
},
{
key: 'quoteQA',
renderTypeList: [FlowNodeInputTypeEnum.settingDatasetQuotePrompt],
label: '',
debugLabel: '知识库引用',
description: '',
valueType: WorkflowIOValueTypeEnum.datasetQuote,
value: ['iKBoX2vIzETU', 'quoteQA']
}
],
outputs: [
{
id: 'history',
key: 'history',
label: 'core.module.output.label.New context',
description: 'core.module.output.description.New context',
valueType: WorkflowIOValueTypeEnum.chatHistory,
type: FlowNodeOutputTypeEnum.static
},
{
id: 'answerText',
key: 'answerText',
label: 'core.module.output.label.Ai response content',
description: 'core.module.output.description.Ai response content',
valueType: WorkflowIOValueTypeEnum.string,
type: FlowNodeOutputTypeEnum.static
}
]
},
{
nodeId: 'iKBoX2vIzETU',
name: '知识库搜索',
intro: '调用“语义检索”和“全文检索”能力,从“知识库”中查找可能与问题相关的参考内容',
avatar: '/imgs/workflow/db.png',
flowNodeType: FlowNodeTypeEnum.datasetSearchNode,
showStatus: true,
position: {
x: 918.5901682164496,
y: -227.11542247619582
},
version: '481',
inputs: [
{
key: 'datasets',
renderTypeList: [
FlowNodeInputTypeEnum.selectDataset,
FlowNodeInputTypeEnum.reference
],
label: 'core.module.input.label.Select dataset',
value: selectedDatasets,
valueType: WorkflowIOValueTypeEnum.selectDataset,
list: [],
required: true
},
{
key: 'similarity',
renderTypeList: [FlowNodeInputTypeEnum.selectDatasetParamsModal],
label: '',
value: formData.dataset.similarity,
valueType: WorkflowIOValueTypeEnum.number
},
{
key: 'limit',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
value: formData.dataset.limit,
valueType: WorkflowIOValueTypeEnum.number
},
{
key: 'searchMode',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
valueType: WorkflowIOValueTypeEnum.string,
value: formData.dataset.searchMode
},
{
key: 'usingReRank',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
valueType: WorkflowIOValueTypeEnum.boolean,
value: formData.dataset.usingReRank
},
{
key: 'datasetSearchUsingExtensionQuery',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
valueType: WorkflowIOValueTypeEnum.boolean,
value: formData.dataset.datasetSearchUsingExtensionQuery
},
{
key: 'datasetSearchExtensionModel',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
valueType: WorkflowIOValueTypeEnum.string,
value: formData.dataset.datasetSearchExtensionModel
},
{
key: 'datasetSearchExtensionBg',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
valueType: WorkflowIOValueTypeEnum.string,
value: formData.dataset.datasetSearchExtensionBg
},
{
key: 'userChatInput',
renderTypeList: [FlowNodeInputTypeEnum.reference, FlowNodeInputTypeEnum.textarea],
valueType: WorkflowIOValueTypeEnum.string,
label: '用户问题',
required: true,
toolDescription: '需要检索的内容',
value: [workflowStartNodeId, 'userChatInput']
}
],
outputs: [
{
id: 'quoteQA',
key: 'quoteQA',
label: 'core.module.Dataset quote.label',
type: FlowNodeOutputTypeEnum.static,
valueType: WorkflowIOValueTypeEnum.datasetQuote
}
]
}
aiChatTemplate(formData),
datasetNodeTemplate(formData, [workflowStartNodeId, 'userChatInput'])
],
edges: [
{
source: workflowStartNodeId,
target: 'iKBoX2vIzETU',
target: datasetNodeId,
sourceHandle: `${workflowStartNodeId}-source-right`,
targetHandle: 'iKBoX2vIzETU-target-left'
targetHandle: `${datasetNodeId}-target-left`
},
{
source: 'iKBoX2vIzETU',
target: '7BdojPlukIQw',
sourceHandle: 'iKBoX2vIzETU-source-right',
targetHandle: '7BdojPlukIQw-target-left'
source: datasetNodeId,
target: aiChatNodeId,
sourceHandle: `${datasetNodeId}-source-right`,
targetHandle: `${aiChatNodeId}-target-left`
}
]
};
}
function toolTemplates(formData: AppSimpleEditFormType): WorkflowType {
const toolNodeId = getNanoid(6);
const datasetNodeId = getNanoid(6);

// Dataset tool config
const datasetTool: WorkflowType | null =
selectedDatasets.length > 0
? {
nodes: [
{
nodeId: datasetNodeId,
name: '知识库搜索',
intro: '调用“语义检索”和“全文检索”能力,从“知识库”中查找可能与问题相关的参考内容',
avatar: '/imgs/workflow/db.png',
flowNodeType: FlowNodeTypeEnum.datasetSearchNode,
showStatus: true,
position: {
x: 500,
y: 545
},
version: '481',
inputs: [
{
key: 'datasets',
renderTypeList: [
FlowNodeInputTypeEnum.selectDataset,
FlowNodeInputTypeEnum.reference
],
label: 'core.module.input.label.Select dataset',
value: selectedDatasets,
valueType: WorkflowIOValueTypeEnum.selectDataset,
list: [],
required: true
},
{
key: 'similarity',
renderTypeList: [FlowNodeInputTypeEnum.selectDatasetParamsModal],
label: '',
value: formData.dataset.similarity,
valueType: WorkflowIOValueTypeEnum.number
},
{
key: 'limit',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
value: formData.dataset.limit,
valueType: WorkflowIOValueTypeEnum.number
},
{
key: 'searchMode',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
valueType: WorkflowIOValueTypeEnum.string,
value: formData.dataset.searchMode
},
{
key: 'usingReRank',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
valueType: WorkflowIOValueTypeEnum.boolean,
value: formData.dataset.usingReRank
},
{
key: 'datasetSearchUsingExtensionQuery',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
valueType: WorkflowIOValueTypeEnum.boolean,
value: formData.dataset.datasetSearchUsingExtensionQuery
},
{
key: 'datasetSearchExtensionModel',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
valueType: WorkflowIOValueTypeEnum.string,
value: formData.dataset.datasetSearchExtensionModel
},
{
key: 'datasetSearchExtensionBg',
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
valueType: WorkflowIOValueTypeEnum.string,
value: formData.dataset.datasetSearchExtensionBg
},
{
key: 'userChatInput',
renderTypeList: [
FlowNodeInputTypeEnum.reference,
FlowNodeInputTypeEnum.textarea
],
valueType: WorkflowIOValueTypeEnum.string,
label: '用户问题',
required: true,
toolDescription: '需要检索的内容'
}
],
outputs: [
{
id: 'quoteQA',
key: 'quoteQA',
label: 'core.module.Dataset quote.label',
type: FlowNodeOutputTypeEnum.static,
valueType: WorkflowIOValueTypeEnum.datasetQuote
}
]
}
],
nodes: [datasetNodeTemplate(formData, '')],
edges: [
{
source: toolNodeId,
@@ -564,7 +326,46 @@ export function form2AppWorkflow(data: AppSimpleEditFormType): WorkflowType & {
]
}
: null;
// Read file tool config
const readFileTool: WorkflowType | null = data.chatConfig.fileSelectConfig?.canSelectFile
? {
nodes: [
{
nodeId: ReadFilesNodes.id,
name: t(ReadFilesNodes.name),
intro: t(ReadFilesNodes.intro),
avatar: ReadFilesNodes.avatar,
flowNodeType: ReadFilesNodes.flowNodeType,
showStatus: true,
position: {
x: 974.6209854328943,
y: 587.6378828744465
},
version: '489',
inputs: [
{
key: NodeInputKeyEnum.fileUrlList,
renderTypeList: [FlowNodeInputTypeEnum.reference],
valueType: WorkflowIOValueTypeEnum.arrayString,
label: t('app:workflow.file_url'),
value: [workflowStartNodeId, 'userFiles']
}
],
outputs: ReadFilesNodes.outputs
}
],
edges: [
{
source: toolNodeId,
target: ReadFilesNodes.id,
sourceHandle: 'selectedTools',
targetHandle: 'selectedTools'
}
]
}
: null;

// Computed tools config
const pluginTool: WorkflowType[] = formData.selectedTools.map((tool, i) => {
const nodeId = getNanoid(6);
return {
@@ -602,16 +403,16 @@ export function form2AppWorkflow(data: AppSimpleEditFormType): WorkflowType & {
nodes: [
{
nodeId: toolNodeId,
name: '工具调用',
intro: '通过AI模型自动选择一个或多个功能块进行调用,也可以对插件进行调用。',
avatar: '/imgs/workflow/tool.svg',
flowNodeType: FlowNodeTypeEnum.tools,
name: ToolModule.name,
intro: ToolModule.intro,
avatar: ToolModule.avatar,
flowNodeType: ToolModule.flowNodeType,
showStatus: true,
position: {
x: 1062.1738942532802,
y: -223.65033022650476
},
version: '481',
version: ToolModule.version,
inputs: [
{
key: 'model',
@@ -671,12 +472,20 @@ export function form2AppWorkflow(data: AppSimpleEditFormType): WorkflowType & {
label: '用户问题',
required: true,
value: [workflowStartNodeId, 'userChatInput']
},
{
key: NodeInputKeyEnum.aiChatVision,
renderTypeList: [FlowNodeInputTypeEnum.hidden],
label: '',
valueType: WorkflowIOValueTypeEnum.boolean,
value: true
}
],
outputs: ToolModule.outputs
},
// tool nodes
...(datasetTool ? datasetTool.nodes : []),
...(readFileTool ? readFileTool.nodes : []),
...pluginTool.map((tool) => tool.nodes).flat()
],
edges: [
@@ -688,6 +497,7 @@ export function form2AppWorkflow(data: AppSimpleEditFormType): WorkflowType & {
},
// tool edges
...(datasetTool ? datasetTool.edges : []),
...(readFileTool ? readFileTool.edges : []),
...pluginTool.map((tool) => tool.edges).flat()
]
};
@@ -696,13 +506,14 @@ export function form2AppWorkflow(data: AppSimpleEditFormType): WorkflowType & {
}

const workflow = (() => {
if (data.selectedTools.length > 0) return toolTemplates(data);
if (data.selectedTools.length > 0 || data.chatConfig.fileSelectConfig?.canSelectFile)
return toolTemplates(data);
if (selectedDatasets.length > 0) return datasetTemplate(data);
return simpleChatTemplate(data);
})();

return {
nodes: [systemConfigTemplate(data), workflowStartTemplate(), ...workflow.nodes],
nodes: [systemConfigTemplate(), workflowStartTemplate(), ...workflow.nodes],
edges: workflow.edges,
chatConfig: data.chatConfig
};
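Because form2AppWorkflow now takes the translate function as its second argument, call sites (as in the Header hunks near the top of this diff) pass t from next-i18next. A minimal sketch of such a call; the hook name and wrapper are assumptions for illustration:

```ts
// Sketch only: the two-argument call, with `t` supplied by next-i18next.
import { useTranslation } from 'next-i18next';
import { form2AppWorkflow } from '@/web/core/app/utils';
import type { AppSimpleEditFormType } from '@fastgpt/global/core/app/type';

function useAppWorkflow(appForm: AppSimpleEditFormType) {
  const { t } = useTranslation();
  const { nodes, edges, chatConfig } = form2AppWorkflow(appForm, t);
  return { nodes, edges, chatConfig };
}
```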
@@ -450,7 +450,8 @@ export const compareWorkflow = (workflow1: WorkflowType, workflow2: WorkflowType
ttsConfig: clone1.chatConfig?.ttsConfig || undefined,
whisperConfig: clone1.chatConfig?.whisperConfig || undefined,
scheduledTriggerConfig: clone1.chatConfig?.scheduledTriggerConfig || undefined,
chatInputGuide: clone1.chatConfig?.chatInputGuide || undefined
chatInputGuide: clone1.chatConfig?.chatInputGuide || undefined,
fileSelectConfig: clone1.chatConfig?.fileSelectConfig || undefined
},
{
welcomeText: clone2.chatConfig?.welcomeText || '',
@@ -459,7 +460,8 @@ export const compareWorkflow = (workflow1: WorkflowType, workflow2: WorkflowType
ttsConfig: clone2.chatConfig?.ttsConfig || undefined,
whisperConfig: clone2.chatConfig?.whisperConfig || undefined,
scheduledTriggerConfig: clone2.chatConfig?.scheduledTriggerConfig || undefined,
chatInputGuide: clone2.chatConfig?.chatInputGuide || undefined
chatInputGuide: clone2.chatConfig?.chatInputGuide || undefined,
fileSelectConfig: clone2.chatConfig?.fileSelectConfig || undefined
}
)
) {