diff --git a/projects/app/package.json b/projects/app/package.json
index f1604650cd..184bb87881 100644
--- a/projects/app/package.json
+++ b/projects/app/package.json
@@ -1,6 +1,6 @@
 {
   "name": "app",
-  "version": "4.14.10",
+  "version": "4.14.10.1",
   "private": false,
   "scripts": {
     "dev": "NODE_OPTIONS='--max-old-space-size=8192' npm run build:workers && next dev",
diff --git a/projects/app/src/components/core/ai/SettingLLMModel/index.tsx b/projects/app/src/components/core/ai/SettingLLMModel/index.tsx
index 76f967e728..9294ff1cc5 100644
--- a/projects/app/src/components/core/ai/SettingLLMModel/index.tsx
+++ b/projects/app/src/components/core/ai/SettingLLMModel/index.tsx
@@ -9,21 +9,14 @@ import MyIcon from '@fastgpt/web/components/common/Icon';
 import AIModelSelector from '@/components/Select/AIModelSelector';
 import { getWebDefaultLLMModel } from '@/web/common/system/utils';
 import { useMemoEnhance } from '@fastgpt/web/hooks/useMemoEnhance';
-import { useLatest } from 'ahooks';
 
 type Props = {
-  defaultModel?: string;
   defaultData: SettingAIDataType;
   onChange: (e: SettingAIDataType) => void;
   bg?: string;
 };
 
-const SettingLLMModel = ({
-  defaultModel,
-  defaultData,
-  onChange,
-  ...props
-}: AIChatSettingsModalProps & Props) => {
+const SettingLLMModel = ({ defaultData, onChange, ...props }: AIChatSettingsModalProps & Props) => {
   const { t } = useTranslation();
 
   const { llmModelList } = useSystemStore();
@@ -38,18 +31,20 @@ const SettingLLMModel = ({
     };
   }, [llmModelList]);
 
-  // Set default model
-  const lastDefaultModel = useLatest(defaultModel);
+  // Reset undefined model
   useEffect(() => {
-    if (modelSet.size === 0) return;
-    if (!modelSet.has(model)) {
-      const defaultLLM = lastDefaultModel.current || defaultLLMModel;
-      if (defaultLLM && modelSet.has(defaultLLM)) {
+    if (model) {
+      if (modelSet.size > 0 && !modelSet.has(model) && defaultLLMModel) {
         onChange({
           ...defaultData,
-          model: defaultLLM
+          model: defaultLLMModel
         });
       }
+    } else if (defaultLLMModel) {
+      onChange({
+        ...defaultData,
+        model: defaultLLMModel
+      });
     }
   }, [model, defaultData, modelSet, defaultLLMModel]);
 
diff --git a/projects/app/src/pageComponents/app/detail/Edit/ChatAgent/index.tsx b/projects/app/src/pageComponents/app/detail/Edit/ChatAgent/index.tsx
index 1ece9c9213..e7d582d96a 100644
--- a/projects/app/src/pageComponents/app/detail/Edit/ChatAgent/index.tsx
+++ b/projects/app/src/pageComponents/app/detail/Edit/ChatAgent/index.tsx
@@ -1,5 +1,5 @@
 import React, { useState } from 'react';
-import { getDefaultAppForm } from '@fastgpt/global/core/app/utils';
+import type { AppFormEditFormType } from '@fastgpt/global/core/app/formEdit/type';
 import { agentForm2AppWorkflow, appWorkflow2AgentForm } from './utils';
 
 import Header from '../FormComponent/Header';
@@ -24,14 +24,9 @@ const AgentEdit = () => {
     appDetail._id
   );
 
-  const [appForm, setAppForm] = useState(getDefaultAppForm());
-
-  // Init app form
-  useMount(async () => {
-    let initialAppForm;
-
+  const [appForm, setAppForm] = useState<AppFormEditFormType>(() => {
     if (past.length === 0) {
-      initialAppForm = appWorkflow2AgentForm({
+      return appWorkflow2AgentForm({
         nodes: appDetail.modules,
         chatConfig: {
           ...appDetail.chatConfig,
@@ -41,17 +36,20 @@ const AgentEdit = () => {
           }
         }
       }
     });
+    }
+
+    return past[0].appForm;
+  });
+
+  // Init snapshot
+  useMount(() => {
+    if (past.length === 0) {
       saveSnapshot({
-        appForm: initialAppForm,
+        appForm,
         title: t('app:initial_form'),
         isSaved: true
       });
-    } else {
-      initialAppForm = past[0].appForm;
     }
-
-    // Set initial app form
-    setAppForm(initialAppForm);
   });
 
diff --git a/projects/app/src/pageComponents/app/detail/Edit/SimpleApp/index.tsx b/projects/app/src/pageComponents/app/detail/Edit/SimpleApp/index.tsx
index 0d55f0d18d..b6b6d8033d 100644
--- a/projects/app/src/pageComponents/app/detail/Edit/SimpleApp/index.tsx
+++ b/projects/app/src/pageComponents/app/detail/Edit/SimpleApp/index.tsx
@@ -1,6 +1,4 @@
 import React, { useState } from 'react';
-import { getDefaultAppForm } from '@fastgpt/global/core/app/utils';
-
 import Header from '../FormComponent/Header';
 import { useContextSelector } from 'use-context-selector';
 import { AppContext, TabEnum } from '../../context';
@@ -25,22 +23,16 @@ const SimpleEdit = () => {
     appDetail._id
   );
 
-  const [appForm, setAppForm] = useState(getDefaultAppForm());
-
-  // Init app form
-  useMount(() => {
+  const [appForm, setAppForm] = useState(() => {
     if (appDetail.version !== 'v2') {
-      return setAppForm(
-        appWorkflow2Form({
-          nodes: v1Workflow2V2((appDetail.modules || []) as any)?.nodes,
-          chatConfig: appDetail.chatConfig
-        })
-      );
+      return appWorkflow2Form({
+        nodes: v1Workflow2V2((appDetail.modules || []) as any)?.nodes,
+        chatConfig: appDetail.chatConfig
+      });
     }
 
-    // 初始化snapshot
     if (past.length === 0) {
-      const appForm = appWorkflow2Form({
+      return appWorkflow2Form({
         nodes: appDetail.modules,
         chatConfig: {
           ...appDetail.chatConfig,
@@ -50,14 +42,18 @@ const SimpleEdit = () => {
           }
         }
       });
+    }
+    return past[0].appForm;
+  });
+
+  // Init snapshot
+  useMount(() => {
+    if (past.length === 0) {
       saveSnapshot({
         appForm,
         title: t('app:initial_form'),
         isSaved: true
       });
-      setAppForm(appForm);
-    } else {
-      setAppForm(past[0].appForm);
     }
   });
 
diff --git a/projects/app/src/pageComponents/app/detail/WorkflowComponents/Flow/nodes/render/RenderInput/templates/SettingLLMModel.tsx b/projects/app/src/pageComponents/app/detail/WorkflowComponents/Flow/nodes/render/RenderInput/templates/SettingLLMModel.tsx
index 407fb8b15e..559a8b9cdb 100644
--- a/projects/app/src/pageComponents/app/detail/WorkflowComponents/Flow/nodes/render/RenderInput/templates/SettingLLMModel.tsx
+++ b/projects/app/src/pageComponents/app/detail/WorkflowComponents/Flow/nodes/render/RenderInput/templates/SettingLLMModel.tsx
@@ -9,7 +9,7 @@ import { useMemoEnhance } from '@fastgpt/web/hooks/useMemoEnhance';
 import { useLocalStorageState } from 'ahooks';
 import { getWebDefaultLLMModel } from '@/web/common/system/utils';
 
-const SelectAiModelRender = ({ item, inputs = [], nodeId }: RenderInputProps) => {
+const SelectAiModelRender = ({ inputs = [], nodeId }: RenderInputProps) => {
   const onChangeNode = useContextSelector(WorkflowActionsContext, (v) => v.onChangeNode);
 
   const [defaultModel, setDefaultModel] = useLocalStorageState(
@@ -46,7 +46,7 @@ const SelectAiModelRender = ({ item, inputs = [], nodeId }: RenderInputProps) =>
 
   const llmModelData: SettingAIDataType = useMemoEnhance(
     () => ({
-      model: inputs.find((input) => input.key === NodeInputKeyEnum.aiModel)?.value ?? '',
+      model: inputs.find((input) => input.key === NodeInputKeyEnum.aiModel)?.value ?? defaultModel,
       maxToken: inputs.find((input) => input.key === NodeInputKeyEnum.aiChatMaxToken)?.value,
       temperature: inputs.find((input) => input.key === NodeInputKeyEnum.aiChatTemperature)?.value,
       isResponseAnswerText: inputs.find(
@@ -64,16 +64,10 @@ const SelectAiModelRender = ({ item, inputs = [], nodeId }: RenderInputProps) =>
       aiChatJsonSchema: inputs.find((input) => input.key === NodeInputKeyEnum.aiChatJsonSchema)
         ?.value
     }),
-    [inputs]
+    [inputs, defaultModel]
   );
 
-  return (
-    <SettingLLMModel
-      defaultModel={defaultModel}
-      defaultData={llmModelData}
-      onChange={onChangeModel}
-    />
-  );
+  return <SettingLLMModel defaultData={llmModelData} onChange={onChangeModel} />;
 };
 
 export default React.memo(SelectAiModelRender);