feat: model select support close params; perf: dataset params slider; update doc (#3453)

* feat: model select support close params

* perf: dataset params slider

* update doc

* update doc

* add delete log

* perf: ai config overflow

* test

* test

* test

* delete collection tags

* delete collection tags
This commit is contained in:
Archer
2024-12-23 23:47:33 +08:00
committed by GitHub
parent a7f25994d5
commit f646ef8595
25 changed files with 256 additions and 188 deletions

View File

@@ -18,10 +18,10 @@ import {
Thead,
Tr,
Table,
useDisclosure
useDisclosure,
FlexProps
} from '@chakra-ui/react';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import MySlider from '@/components/Slider';
import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import type { SettingAIDataType } from '@fastgpt/global/core/app/type.d';
import { getDocPath } from '@/web/common/system/doc';
@@ -37,16 +37,33 @@ const AiPointsModal = dynamic(() =>
import('@/pages/price/components/Points').then((mod) => mod.AiPointsModal)
);
const FlexItemStyles: FlexProps = {
mt: 5,
alignItems: 'center',
h: '35px'
};
const LabelStyles: BoxProps = {
display: 'flex',
alignItems: 'center',
justifyContent: 'space-between',
fontSize: 'sm',
color: 'myGray.900',
width: '9rem',
mr: 5
};
export type AIChatSettingsModalProps = {};
const AIChatSettingsModal = ({
onClose,
onSuccess,
defaultData,
llmModels = []
}: {
}: AIChatSettingsModalProps & {
onClose: () => void;
onSuccess: (e: SettingAIDataType) => void;
defaultData: SettingAIDataType;
llmModels?: LLMModelItemType[];
llmModels: LLMModelItemType[];
}) => {
const { t } = useTranslation();
const [refresh, setRefresh] = useState(false);
@@ -59,7 +76,11 @@ const AIChatSettingsModal = ({
const showResponseAnswerText = watch(NodeInputKeyEnum.aiChatIsResponseText) !== undefined;
const showVisionSwitch = watch(NodeInputKeyEnum.aiChatVision) !== undefined;
const showMaxHistoriesSlider = watch('maxHistories') !== undefined;
const maxToken = watch('maxToken');
const temperature = watch('temperature');
const useVision = watch('aiChatVision');
const selectedModel = getWebLLMModel(model);
const llmSupportVision = !!selectedModel?.vision;
@@ -79,14 +100,6 @@ const AIChatSettingsModal = ({
setRefresh(!refresh);
};
const LabelStyles: BoxProps = {
display: 'flex',
alignItems: 'center',
fontSize: 'sm',
color: 'myGray.900',
width: '7rem'
};
const {
isOpen: isOpenAiPointsModal,
onClose: onCloseAiPointsModal,
@@ -116,9 +129,9 @@ const AIChatSettingsModal = ({
}
w={'500px'}
>
<ModalBody overflowY={'auto'}>
<ModalBody overflowY={'auto'} overflowX={'hidden'}>
<Flex alignItems={'center'}>
<Box {...LabelStyles} mr={2}>
<Box {...LabelStyles} w={'5rem'}>
{t('common:core.ai.Model')}
</Box>
<Box flex={'1 0 0'}>
@@ -184,44 +197,13 @@ const AIChatSettingsModal = ({
</Table>
</TableContainer>
<Flex>
<Box {...LabelStyles} mr={2}>
{t('common:core.app.Temperature')}
</Box>
<Box flex={'1 0 0'}>
<InputSlider
min={0}
max={10}
step={1}
value={getValues(NodeInputKeyEnum.aiChatTemperature)}
onChange={(e) => {
setValue(NodeInputKeyEnum.aiChatTemperature, e);
setRefresh(!refresh);
}}
/>
</Box>
</Flex>
<Flex mt={5}>
<Box {...LabelStyles} mr={2}>
{t('common:core.app.Max tokens')}
</Box>
<Box flex={'1 0 0'}>
<InputSlider
min={100}
max={tokenLimit}
step={200}
value={getValues(NodeInputKeyEnum.aiChatMaxToken)}
onChange={(val) => {
setValue(NodeInputKeyEnum.aiChatMaxToken, val);
setRefresh(!refresh);
}}
/>
</Box>
</Flex>
{showMaxHistoriesSlider && (
<Flex mt={5}>
<Box {...LabelStyles} mr={2}>
{t('app:max_histories_number')}
<Flex {...FlexItemStyles}>
<Box {...LabelStyles}>
<Flex alignItems={'center'}>
<Box>{t('app:max_histories_number')}</Box>
<QuestionTip label={t('app:max_histories_number_tip')} />
</Flex>
</Box>
<Box flex={'1 0 0'}>
<InputSlider
@@ -237,15 +219,71 @@ const AIChatSettingsModal = ({
</Box>
</Flex>
)}
<Flex {...FlexItemStyles}>
<Box {...LabelStyles}>
<Box>{t('app:max_tokens')}</Box>
<Switch
isChecked={maxToken !== undefined}
size={'sm'}
onChange={(e) => {
setValue('maxToken', e.target.checked ? tokenLimit / 2 : undefined);
}}
/>
</Box>
<Box flex={'1 0 0'}>
<InputSlider
min={100}
max={tokenLimit}
step={200}
isDisabled={maxToken === undefined}
value={maxToken}
onChange={(val) => {
setValue(NodeInputKeyEnum.aiChatMaxToken, val);
setRefresh(!refresh);
}}
/>
</Box>
</Flex>
<Flex {...FlexItemStyles}>
<Box {...LabelStyles}>
<Flex alignItems={'center'}>
{t('app:temperature')}
<QuestionTip label={t('app:temperature_tip')} />
</Flex>
<Switch
isChecked={temperature !== undefined}
size={'sm'}
onChange={(e) => {
setValue('temperature', e.target.checked ? 0 : undefined);
}}
/>
</Box>
<Box flex={'1 0 0'}>
<InputSlider
min={0}
max={10}
step={1}
value={temperature}
isDisabled={temperature === undefined}
onChange={(e) => {
setValue(NodeInputKeyEnum.aiChatTemperature, e);
setRefresh(!refresh);
}}
/>
</Box>
</Flex>
{showResponseAnswerText && (
<Flex mt={5} alignItems={'center'}>
<Flex {...FlexItemStyles} h={'25px'}>
<Box {...LabelStyles}>
{t('app:stream_response')}
<QuestionTip ml={1} label={t('app:stream_response_tip')}></QuestionTip>
</Box>
<Box flex={1}>
<Flex alignItems={'center'}>
{t('app:stream_response')}
<QuestionTip ml={1} label={t('app:stream_response_tip')}></QuestionTip>
</Flex>
<Switch
isChecked={getValues(NodeInputKeyEnum.aiChatIsResponseText)}
size={'sm'}
onChange={(e) => {
const value = e.target.checked;
setValue(NodeInputKeyEnum.aiChatIsResponseText, value);
@@ -256,15 +294,17 @@ const AIChatSettingsModal = ({
</Flex>
)}
{showVisionSwitch && (
<Flex mt={5} alignItems={'center'}>
<Flex {...FlexItemStyles} h={'25px'}>
<Box {...LabelStyles}>
{t('app:llm_use_vision')}
<QuestionTip ml={1} label={t('app:llm_use_vision_tip')}></QuestionTip>
</Box>
<Box flex={1}>
<Flex alignItems={'center'}>
{t('app:llm_use_vision')}
<QuestionTip ml={1} label={t('app:llm_use_vision_tip')}></QuestionTip>
</Flex>
{llmSupportVision ? (
<Switch
isChecked={useVision}
size={'sm'}
onChange={(e) => {
const value = e.target.checked;
setValue(NodeInputKeyEnum.aiChatVision, value);

View File

@@ -3,7 +3,7 @@ import { useSystemStore } from '@/web/common/system/useSystemStore';
import { LLMModelTypeEnum, llmModelTypeFilterMap } from '@fastgpt/global/core/ai/constants';
import { Box, css, HStack, IconButton, useDisclosure } from '@chakra-ui/react';
import type { SettingAIDataType } from '@fastgpt/global/core/app/type.d';
import AISettingModal from '@/components/core/ai/AISettingModal';
import AISettingModal, { AIChatSettingsModalProps } from '@/components/core/ai/AISettingModal';
import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
import { useTranslation } from 'next-i18next';
import MyIcon from '@fastgpt/web/components/common/Icon';
@@ -17,7 +17,12 @@ type Props = {
bg?: string;
};
const SettingLLMModel = ({ llmModelType = LLMModelTypeEnum.all, defaultData, onChange }: Props) => {
const SettingLLMModel = ({
llmModelType = LLMModelTypeEnum.all,
defaultData,
onChange,
...props
}: AIChatSettingsModalProps & Props) => {
const { t } = useTranslation();
const { llmModelList } = useSystemStore();
@@ -95,6 +100,7 @@ const SettingLLMModel = ({ llmModelType = LLMModelTypeEnum.all, defaultData, onC
}}
defaultData={defaultData}
llmModels={modelList}
{...props}
/>
)}
</Box>

View File

@@ -30,6 +30,7 @@ import QuestionTip from '@fastgpt/web/components/common/MyTooltip/QuestionTip';
import FormLabel from '@fastgpt/web/components/common/MyBox/FormLabel';
import MyTextarea from '@/components/common/Textarea/MyTextarea';
import { defaultDatasetMaxTokens } from '@fastgpt/global/core/app/constants';
import InputSlider from '@fastgpt/web/components/common/MySlider/InputSlider';
export type DatasetParamsProps = {
searchMode: `${DatasetSearchModeEnum}`;
@@ -224,19 +225,12 @@ const DatasetParamsModal = ({
<Box pt={5}>
{limit !== undefined && (
<Box display={['block', 'flex']}>
<Flex flex={'0 0 120px'} mb={[8, 0]}>
<FormLabel>{t('common:core.dataset.search.Max Tokens')}</FormLabel>
<QuestionTip
ml={1}
label={t('common:core.dataset.search.Max Tokens Tips')}
></QuestionTip>
<Flex flex={'0 0 120px'} alignItems={'center'} mb={[5, 0]}>
<FormLabel>{t('app:max_quote_tokens')}</FormLabel>
<QuestionTip label={t('app:max_quote_tokens_tips')} />
</Flex>
<Box flex={1} mx={4}>
<MySlider
markList={[
{ label: '100', value: 100 },
{ label: maxTokens, value: maxTokens }
]}
<Box flex={'1 0 0'}>
<InputSlider
min={100}
max={maxTokens}
step={maxTokenStep}
@@ -249,21 +243,14 @@ const DatasetParamsModal = ({
</Box>
</Box>
)}
<Box display={['block', 'flex']} mt={10}>
<Flex flex={'0 0 120px'} mb={[8, 0]}>
<FormLabel>{t('common:core.dataset.search.Min Similarity')}</FormLabel>
<QuestionTip
ml={1}
label={t('common:core.dataset.search.Min Similarity Tips')}
></QuestionTip>
<Box display={['block', 'flex']} mt={[6, 10]} mb={4}>
<Flex flex={'0 0 120px'} alignItems={'center'} mb={[5, 0]}>
<FormLabel>{t('app:min_similarity')}</FormLabel>
<QuestionTip label={t('app:min_similarity_tip')} />
</Flex>
<Box flex={1} mx={4}>
<Box flex={'1 0 0'}>
{showSimilarity ? (
<MySlider
markList={[
{ label: '0', value: 0 },
{ label: '1', value: 1 }
]}
<InputSlider
min={0}
max={1}
step={0.01}

View File

@@ -62,8 +62,8 @@ const SearchParamsTip = ({
<Thead>
<Tr bg={'transparent !important'}>
<Th fontSize={'mini'}>{t('common:core.dataset.search.search mode')}</Th>
<Th fontSize={'mini'}>{t('common:core.dataset.search.Max Tokens')}</Th>
<Th fontSize={'mini'}>{t('common:core.dataset.search.Min Similarity')}</Th>
<Th fontSize={'mini'}>{t('app:max_quote_tokens')}</Th>
<Th fontSize={'mini'}>{t('app:min_similarity')}</Th>
{hasReRankModel && <Th fontSize={'mini'}>{t('common:core.dataset.search.ReRank')}</Th>}
<Th fontSize={'mini'}>{t('common:core.module.template.Query extension')}</Th>
{hasEmptyResponseMode && (

View File

@@ -16,8 +16,8 @@ import { findAppAndAllChildren } from '@fastgpt/service/core/app/controller';
import { MongoResourcePermission } from '@fastgpt/service/support/permission/schema';
import { ClientSession } from '@fastgpt/service/common/mongo';
import { deleteChatFiles } from '@fastgpt/service/core/chat/controller';
import { getAppLatestVersion } from '@fastgpt/service/core/app/version/controller';
import { pushTrack } from '@fastgpt/service/common/middle/tracks/utils';
import { MongoOpenApi } from '@fastgpt/service/support/openapi/schema';
async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
const { appId } = req.query as { appId: string };
@@ -77,34 +77,31 @@ export const onDelOneApp = async ({
},
{ session }
);
// 删除分享链接
await MongoOutLink.deleteMany(
{
appId
},
{ session }
);
await MongoOutLink.deleteMany({
appId
}).session(session);
// Openapi
await MongoOpenApi.deleteMany({
appId
}).session(session);
// delete version
await MongoAppVersion.deleteMany(
{
appId
},
{ session }
);
await MongoChatInputGuide.deleteMany(
{
appId
},
{ session }
);
await MongoResourcePermission.deleteMany(
{
resourceType: PerResourceTypeEnum.app,
teamId,
resourceId: appId
},
{ session }
);
await MongoAppVersion.deleteMany({
appId
}).session(session);
await MongoChatInputGuide.deleteMany({
appId
}).session(session);
await MongoResourcePermission.deleteMany({
resourceType: PerResourceTypeEnum.app,
teamId,
resourceId: appId
}).session(session);
// delete app
await MongoApp.deleteOne(
{

View File

@@ -7,6 +7,7 @@ import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
import { NextAPI } from '@/service/middleware/entry';
import { OwnerPermissionVal } from '@fastgpt/global/support/permission/constant';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
import { MongoDatasetCollectionTags } from '@fastgpt/service/core/dataset/tag/schema';
async function handler(req: NextApiRequest) {
const { id: datasetId } = req.query as {
@@ -30,14 +31,23 @@ async function handler(req: NextApiRequest) {
teamId,
datasetId
});
const datasetIds = datasets.map((d) => d._id);
// delete all dataset.data and pg data
await mongoSessionRun(async (session) => {
// delete dataset data
await delDatasetRelevantData({ datasets, session });
// delete collection.tags
await MongoDatasetCollectionTags.deleteMany({
teamId,
datasetId: { $in: datasetIds }
}).session(session);
// delete dataset
await MongoDataset.deleteMany(
{
_id: { $in: datasets.map((d) => d._id) }
_id: { $in: datasetIds }
},
{ session }
);

View File

@@ -32,10 +32,8 @@ const SelectAiModelRender = ({ item, inputs = [], nodeId }: RenderInputProps) =>
const llmModelData: SettingAIDataType = useMemo(
() => ({
model: inputs.find((input) => input.key === NodeInputKeyEnum.aiModel)?.value ?? '',
maxToken:
inputs.find((input) => input.key === NodeInputKeyEnum.aiChatMaxToken)?.value ?? 2048,
temperature:
inputs.find((input) => input.key === NodeInputKeyEnum.aiChatTemperature)?.value ?? 1,
maxToken: inputs.find((input) => input.key === NodeInputKeyEnum.aiChatMaxToken)?.value,
temperature: inputs.find((input) => input.key === NodeInputKeyEnum.aiChatTemperature)?.value,
isResponseAnswerText: inputs.find(
(input) => input.key === NodeInputKeyEnum.aiChatIsResponseText
)?.value,