4.8.9 test (#2299)

* perf: read file prompt

* perf: read file prompt

* perf: free plan tip

* feat: cron job usage

* perf: app templates

* perf: get llm model by name

* feat: support outlink upload file

* fix: upload limit
Archer
2024-08-08 17:45:15 +08:00
committed by GitHub
parent c6dd3076c5
commit d682a8252f
18 changed files with 726 additions and 452 deletions

View File

@@ -1,20 +1,28 @@
 export const getLLMModel = (model?: string) => {
-  return global.llmModels.find((item) => item.model === model) ?? global.llmModels[0];
+  return (
+    global.llmModels.find((item) => item.model === model || item.name === model) ??
+    global.llmModels[0]
+  );
 };
 
 export const getDatasetModel = (model?: string) => {
   return (
-    global.llmModels?.filter((item) => item.datasetProcess)?.find((item) => item.model === model) ??
-    global.llmModels[0]
+    global.llmModels
+      ?.filter((item) => item.datasetProcess)
+      ?.find((item) => item.model === model || item.name === model) ?? global.llmModels[0]
   );
 };
 
 export const getVectorModel = (model?: string) => {
-  return global.vectorModels.find((item) => item.model === model) || global.vectorModels[0];
+  return (
+    global.vectorModels.find((item) => item.model === model || item.name === model) ||
+    global.vectorModels[0]
+  );
 };
 
 export function getAudioSpeechModel(model?: string) {
   return (
-    global.audioSpeechModels.find((item) => item.model === model) || global.audioSpeechModels[0]
+    global.audioSpeechModels.find((item) => item.model === model || item.name === model) ||
+    global.audioSpeechModels[0]
   );
 }
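
The lookup helpers above now match a configured display name as well as the model id, still falling back to the first configured model. A minimal sketch of the new behavior (the model entries below are hypothetical test data, not FastGPT's shipped config):

```ts
// Hypothetical model list; mirrors the shape used by getLLMModel above.
type LLMModelItem = { model: string; name: string };

const llmModels: LLMModelItem[] = [
  { model: 'gpt-4o-mini', name: 'GPT-4o-mini' },
  { model: 'gpt-4o', name: 'GPT-4o' }
];

const getLLMModel = (model?: string) =>
  llmModels.find((item) => item.model === model || item.name === model) ?? llmModels[0];

console.log(getLLMModel('gpt-4o').model);  // 'gpt-4o'      (matched by model id)
console.log(getLLMModel('GPT-4o').model);  // 'gpt-4o'      (now also matched by display name)
console.log(getLLMModel('unknown').model); // 'gpt-4o-mini' (fallback to the first entry)
```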

View File

@@ -95,7 +95,7 @@ export const dispatchReadFiles = async (props: Props): Promise<Response> => {
       })
       .filter(Boolean)
       .slice(0, maxFiles);
-  console.log(parseUrlList);
+
   const readFilesResult = await Promise.all(
     parseUrlList
       .map(async (url) => {

View File

@@ -0,0 +1 @@
<svg width="100%" height="100%" viewBox="0 0 89 32" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M52.308 3.07812H57.8465V4.92428H56.0003V6.77043H54.1541V10.4627H57.8465V12.3089H54.1541V25.232H52.308V27.0781H46.7695V25.232H48.6157V12.3089H46.7695V10.4627H48.6157V6.77043H50.4618V4.92428H52.308V3.07812Z" fill="currentColor"></path><path d="M79.3849 23.3858H81.2311V25.232H83.0772V27.0781H88.6157V25.232H86.7695V23.3858H84.9234V4.92428H79.3849V23.3858Z" fill="currentColor"></path><path d="M57.8465 14.155H59.6926V12.3089H61.5388V10.4627H70.7695V12.3089H74.4618V23.3858H76.308V25.232H78.1541V27.0781H72.6157V25.232H70.7695V23.3858H68.9234V14.155H67.0772V12.3089H65.2311V14.155H63.3849V23.3858H65.2311V25.232H67.0772V27.0781H61.5388V25.232H59.6926V23.3858H57.8465V14.155Z" fill="currentColor"></path><path d="M67.0772 25.232V23.3858H68.9234V25.232H67.0772Z" fill="currentColor"></path><rect opacity="0.22" x="7.38477" y="29.5391" width="2.46154" height="2.46154" fill="#5F4CD9"></rect><rect opacity="0.85" x="2.46094" y="19.6914" width="12.3077" height="2.46154" fill="#5F4CD9"></rect><rect x="4.92383" y="17.2305" width="9.84615" height="2.46154" fill="#5F4CD9"></rect><rect opacity="0.4" x="7.38477" y="27.0781" width="4.92308" height="2.46154" fill="#5F4CD9"></rect><rect opacity="0.7" y="22.1562" width="14.7692" height="2.46154" fill="#5F4CD9"></rect><rect opacity="0.5" x="7.38477" y="24.6133" width="7.38462" height="2.46154" fill="#5F4CD9"></rect><rect opacity="0.22" x="7.38477" y="12.3086" width="2.46154" height="2.46154" fill="#5F4CD9"></rect><rect opacity="0.85" x="2.46094" y="2.46094" width="12.3077" height="2.46154" fill="#5F4CD9"></rect><rect x="4.92383" width="9.84615" height="2.46154" fill="#5F4CD9"></rect><rect opacity="0.4" x="7.38477" y="9.84375" width="4.92308" height="2.46154" fill="#5F4CD9"></rect><rect opacity="0.7" y="4.92188" width="14.7692" height="2.46154" fill="#5F4CD9"></rect><rect opacity="0.5" x="7.38477" y="7.38281" width="7.38462" height="2.46154" fill="#5F4CD9"></rect><rect opacity="0.22" x="24.6152" y="29.5391" width="2.46154" height="2.46154" fill="#5F4CD9"></rect><rect opacity="0.85" x="19.6914" y="19.6914" width="12.3077" height="2.46154" fill="#5F4CD9"></rect><rect x="22.1543" y="17.2305" width="9.84615" height="2.46154" fill="#5F4CD9"></rect><rect opacity="0.4" x="24.6152" y="27.0781" width="4.92308" height="2.46154" fill="#5F4CD9"></rect><rect opacity="0.7" x="17.2305" y="22.1562" width="14.7692" height="2.46154" fill="#5F4CD9"></rect><rect opacity="0.5" x="24.6152" y="24.6133" width="7.38462" height="2.46154" fill="#5F4CD9"></rect><rect opacity="0.22" x="24.6152" y="12.3086" width="2.46154" height="2.46154" fill="#5F4CD9"></rect><rect opacity="0.85" x="19.6914" y="2.46094" width="12.3077" height="2.46154" fill="#5F4CD9"></rect><rect x="22.1543" width="9.84615" height="2.46154" fill="#5F4CD9"></rect><rect opacity="0.4" x="24.6152" y="9.84375" width="4.92308" height="2.46154" fill="#5F4CD9"></rect><rect opacity="0.7" x="17.2305" y="4.92188" width="14.7692" height="2.46154" fill="#5F4CD9"></rect><rect opacity="0.5" x="24.6152" y="7.38281" width="7.38462" height="2.46154" fill="#5F4CD9"></rect></svg>


View File

@@ -20,6 +20,7 @@ import { getDocPath } from '@/web/common/system/doc';
 import AIModelSelector from '@/components/Select/AIModelSelector';
 import { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
 import QuestionTip from '@fastgpt/web/components/common/MyTooltip/QuestionTip';
+import { getWebLLMModel } from '@/web/common/system/utils';
 
 const AIChatSettingsModal = ({
   onClose,
@@ -44,18 +45,18 @@ const AIChatSettingsModal = ({
   const showVisionSwitch = watch(NodeInputKeyEnum.aiChatVision) !== undefined;
   const showMaxHistoriesSlider = watch('maxHistories') !== undefined;
   const useVision = watch('aiChatVision');
-  const selectedModel = llmModelList.find((item) => item.model === model) || llmModelList[0];
+  const selectedModel = getWebLLMModel(model);
   const llmSupportVision = !!selectedModel?.vision;
 
   const tokenLimit = useMemo(() => {
-    return llmModelList.find((item) => item.model === model)?.maxResponse || 4096;
-  }, [llmModelList, model]);
+    return selectedModel?.maxResponse || 4096;
+  }, [selectedModel?.maxResponse]);
 
   const onChangeModel = (e: string) => {
     setValue('model', e);
 
     // update max tokens
-    const modelData = llmModelList.find((item) => item.model === e);
+    const modelData = getWebLLMModel(e);
     if (modelData) {
       setValue('maxToken', modelData.maxResponse / 2);
     }

View File

@@ -137,6 +137,7 @@ const ChatInput = ({
       const { previewUrl } = await uploadFile2DB({
         file: copyFile.rawFile,
         bucketName: 'chat',
+        outLinkAuthData,
         metadata: {
           chatId
         },
@@ -168,7 +169,7 @@ const ChatInput = ({
     {
       manual: false,
       errorToast: t('common:upload_file_error'),
-      refreshDeps: [fileList]
+      refreshDeps: [fileList, outLinkAuthData, chatId]
     }
   );
   const onSelectFile = useCallback(

View File

@@ -7,6 +7,7 @@ import {
 import { useTranslation } from 'next-i18next';
 import React, { useMemo } from 'react';
 import MyIcon from '@fastgpt/web/components/common/Icon';
+import { getWebLLMModel } from '@/web/common/system/utils';
 
 const SearchParamsTip = ({
   searchMode,
@@ -34,11 +35,8 @@ const SearchParamsTip = ({
 
   const extensionModelName = useMemo(
     () =>
-      datasetSearchUsingExtensionQuery
-        ? llmModelList.find((item) => item.model === queryExtensionModel)?.name ??
-          llmModelList[0]?.name
-        : undefined,
-    [datasetSearchUsingExtensionQuery, llmModelList, queryExtensionModel]
+      datasetSearchUsingExtensionQuery ? getWebLLMModel(queryExtensionModel)?.name : undefined,
+    [datasetSearchUsingExtensionQuery, queryExtensionModel, llmModelList]
   );
 
   return (

View File

@@ -1,6 +1,5 @@
 import type { NextApiRequest, NextApiResponse } from 'next';
 import { jsonRes } from '@fastgpt/service/common/response';
-import { authCert } from '@fastgpt/service/support/permission/auth/common';
 import { uploadFile } from '@fastgpt/service/common/file/gridfs/controller';
 import { getUploadModel } from '@fastgpt/service/common/file/multer';
 import { removeFilesByPaths } from '@fastgpt/service/common/file/utils';
@@ -10,6 +9,7 @@ import { ReadFileBaseUrl } from '@fastgpt/global/common/file/constants';
 import { addLog } from '@fastgpt/service/common/system/log';
 import { authFrequencyLimit } from '@/service/common/frequencyLimit/api';
 import { addSeconds } from 'date-fns';
+import { authChatCert } from '@/service/support/permission/auth/chat';
 
 const authUploadLimit = (tmbId: string) => {
   if (!global.feConfigs.uploadFileMaxAmount) return;
@@ -21,20 +21,19 @@ const authUploadLimit = (tmbId: string) => {
 };
 
 async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
-  const start = Date.now();
-  /* Creates the multer uploader */
-  const upload = getUploadModel({
-    maxSize: (global.feConfigs?.uploadFileMaxSize || 500) * 1024 * 1024
-  });
-
   const filePaths: string[] = [];
 
   try {
-    const { teamId, tmbId } = await authCert({ req, authToken: true });
-
-    await authUploadLimit(tmbId);
+    const start = Date.now();
+    /* Creates the multer uploader */
+    const upload = getUploadModel({
+      maxSize: (global.feConfigs?.uploadFileMaxSize || 500) * 1024 * 1024
+    });
 
     const { file, bucketName, metadata } = await upload.doUpload(req, res);
 
+    const { teamId, tmbId, outLinkUid } = await authChatCert({ req, authToken: true });
+    await authUploadLimit(outLinkUid || tmbId);
+
     addLog.info(`Upload file success ${file.originalname}, cost ${Date.now() - start}ms`);
     if (!bucketName) {
@@ -51,15 +50,19 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
       metadata: metadata
     });
 
-    return {
-      fileId,
-      previewUrl: `${ReadFileBaseUrl}?filename=${file.originalname}&token=${await createFileToken({
-        bucketName,
-        teamId,
-        tmbId,
-        fileId
-      })}`
-    };
+    jsonRes(res, {
+      data: {
+        fileId,
+        previewUrl: `${ReadFileBaseUrl}?filename=${file.originalname}&token=${await createFileToken(
+          {
+            bucketName,
+            teamId,
+            tmbId,
+            fileId
+          }
+        )}`
+      }
+    });
   } catch (error) {
     jsonRes(res, {
       code: 500,
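
For API callers, the visible change is that the result is now sent through jsonRes instead of being returned directly, so the payload moves under a data field. A sketch of the shape a client can expect; only fileId and previewUrl are confirmed by the diff, the envelope field names are assumed from the jsonRes convention:

```ts
// Assumed response envelope after the change.
type UploadFileResponse = {
  code: number; // e.g. 200 on success, 500 on error (per the catch branch above)
  data: {
    fileId: string;
    previewUrl: string; // `${ReadFileBaseUrl}?filename=...&token=...`
  };
};
```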

View File

@@ -35,6 +35,7 @@ import { AppContext } from '@/pages/app/detail/components/context';
 import QuestionTip from '@fastgpt/web/components/common/MyTooltip/QuestionTip';
 import FormLabel from '@fastgpt/web/components/common/MyBox/FormLabel';
 import VariableTip from '@/components/common/Textarea/MyTextarea/VariableTip';
+import { getWebLLMModel } from '@/web/common/system/utils';
 
 const DatasetSelectModal = dynamic(() => import('@/components/core/app/DatasetSelectModal'));
 const DatasetParamsModal = dynamic(() => import('@/components/core/app/DatasetParamsModal'));
@@ -121,8 +122,7 @@ const EditForm = ({
     [appForm.chatConfig.variables, t]
   );
 
-  const selectedModel =
-    llmModelList.find((item) => item.model === appForm.aiSettings.model) ?? llmModelList[0];
+  const selectedModel = getWebLLMModel(appForm.aiSettings.model);
   const tokenLimit = useMemo(() => {
     return selectedModel?.quoteMaxToken || 3000;
   }, [selectedModel.quoteMaxToken]);

View File

@@ -29,6 +29,7 @@ import FormLabel from '@fastgpt/web/components/common/MyBox/FormLabel';
 import ValueTypeLabel from './render/ValueTypeLabel';
 import MyIcon from '@fastgpt/web/components/common/Icon';
 import { isWorkflowStartOutput } from '@fastgpt/global/core/workflow/template/system/workflowStart';
+import { getWebLLMModel } from '@/web/common/system/utils';
 
 const NodeDatasetConcat = ({ data, selected }: NodeProps<FlowNodeItemType>) => {
   const { t } = useTranslation();
@@ -46,8 +47,7 @@ const NodeDatasetConcat = ({ data, selected }: NodeProps<FlowNodeItemType>) => {
       if (item.flowNodeType === FlowNodeTypeEnum.chatNode) {
         const model =
           item.inputs.find((item) => item.key === NodeInputKeyEnum.aiModel)?.value || '';
-        const quoteMaxToken =
-          llmModelList.find((item) => item.model === model)?.quoteMaxToken || 3000;
+        const quoteMaxToken = getWebLLMModel(model)?.quoteMaxToken || 3000;
 
         maxTokens = Math.max(maxTokens, quoteMaxToken);
       }

View File

@@ -11,6 +11,7 @@ import { useSystemStore } from '@/web/common/system/useSystemStore';
 import SearchParamsTip from '@/components/core/dataset/SearchParamsTip';
 import { useContextSelector } from 'use-context-selector';
 import { WorkflowContext } from '@/pages/app/detail/components/WorkflowComponents/context';
+import { getWebLLMModel } from '@/web/common/system/utils';
 
 const SelectDatasetParam = ({ inputs = [], nodeId }: RenderInputProps) => {
   const onChangeNode = useContextSelector(WorkflowContext, (v) => v.onChangeNode);
@@ -36,8 +37,7 @@ const SelectDatasetParam = ({ inputs = [], nodeId }: RenderInputProps) => {
       if (item.flowNodeType === FlowNodeTypeEnum.chatNode) {
         const model =
           item.inputs.find((item) => item.key === NodeInputKeyEnum.aiModel)?.value || '';
-        const quoteMaxToken =
-          llmModelList.find((item) => item.model === model)?.quoteMaxToken || 3000;
+        const quoteMaxToken = getWebLLMModel(model)?.quoteMaxToken || 3000;
 
         maxTokens = Math.max(maxTokens, quoteMaxToken);
       }

View File

@@ -338,7 +338,7 @@ export async function getServerSideProps(context: any) {
     props: {
       appId: context?.query?.appId || '',
       chatId: context?.query?.chatId || '',
-      ...(await serviceSideProps(context, ['file', 'app', 'chat']))
+      ...(await serviceSideProps(context, ['file', 'app', 'chat', 'workflow']))
     }
   };
 }

View File

@@ -394,7 +394,7 @@ export async function getServerSideProps(context: any) {
       appIntro: app?.appId?.intro ?? 'intro',
       shareId: shareId ?? '',
       authToken: authToken ?? '',
-      ...(await serviceSideProps(context, ['file', 'app', 'chat']))
+      ...(await serviceSideProps(context, ['file', 'app', 'chat', 'workflow']))
     }
   };
 }

View File

@@ -337,7 +337,7 @@ export async function getServerSideProps(context: any) {
       chatId: context?.query?.chatId || '',
       teamId: context?.query?.teamId || '',
       teamToken: context?.query?.teamToken || '',
-      ...(await serviceSideProps(context, ['file', 'app', 'chat']))
+      ...(await serviceSideProps(context, ['file', 'app', 'chat', 'workflow']))
     }
   };
 }

View File

@@ -103,7 +103,15 @@ export async function authChatCrud({
   3. share page (body: shareId outLinkUid)
   4. team chat page (body: teamId teamToken)
 */
-export async function authChatCert(props: AuthModeType) {
+export async function authChatCert(props: AuthModeType): Promise<{
+  teamId: string;
+  tmbId: string;
+  authType: AuthUserTypeEnum;
+  apikey: string;
+  isOwner: boolean;
+  canWrite: boolean;
+  outLinkUid?: string;
+}> {
   const { teamId, teamToken, shareId, outLinkUid } = props.req.body as OutLinkChatAuthProps;
 
   if (shareId && outLinkUid) {
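
With the return type spelled out, callers can rely on outLinkUid to tell anonymous outlink visitors apart from logged-in team members, which is what the upload endpoint above does for rate limiting. A hedged caller sketch (resolveUploader is an illustrative name, not code from this commit):

```ts
import type { NextApiRequest } from 'next';
import { authChatCert } from '@/service/support/permission/auth/chat';

// outLinkUid is only set for share-link / team-link visitors, so it doubles
// as a per-visitor rate-limit key; logged-in members fall back to tmbId.
async function resolveUploader(req: NextApiRequest) {
  const { teamId, tmbId, outLinkUid } = await authChatCert({ req, authToken: true });
  return { teamId, limitKey: outLinkUid || tmbId };
}
```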

View File

@@ -3,6 +3,7 @@ import { UploadImgProps } from '@fastgpt/global/common/file/api';
 import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
 import { preUploadImgProps } from '@fastgpt/global/common/file/api';
 import { compressBase64Img, type CompressImgProps } from '@fastgpt/web/common/file/img';
+import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';
 
 /**
  * upload file to mongo gridfs
@@ -10,11 +11,13 @@ import { compressBase64Img, type CompressImgProps } from '@fastgpt/web/common/fi
 export const uploadFile2DB = ({
   file,
   bucketName,
+  outLinkAuthData,
   metadata = {},
   percentListen
 }: {
   file: File;
   bucketName: `${BucketNameEnum}`;
+  outLinkAuthData?: OutLinkChatAuthProps;
   metadata?: Record<string, any>;
   percentListen?: (percent: number) => void;
 }) => {
@@ -22,6 +25,14 @@ export const uploadFile2DB = ({
   form.append('metadata', JSON.stringify(metadata));
   form.append('bucketName', bucketName);
   form.append('file', file, encodeURIComponent(file.name));
+
+  if (outLinkAuthData) {
+    for (const key in outLinkAuthData) {
+      // @ts-ignore
+      outLinkAuthData[key] && form.append(key, outLinkAuthData[key]);
+    }
+  }
+
   return postUploadFiles(form, (e) => {
     if (!e.total) return;
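
A hedged call-site sketch for the new outLinkAuthData parameter, matching how ChatInput passes it above (the import path and the shareId/outLinkUid values are placeholders):

```ts
import { uploadFile2DB } from '@/web/common/file/controller'; // assumed path

// Upload a chat attachment from a share (outlink) page; the auth fields are
// appended to the multipart form so the server can run authChatCert.
async function uploadFromSharePage(file: File, chatId: string) {
  const { previewUrl } = await uploadFile2DB({
    file,
    bucketName: 'chat',
    outLinkAuthData: { shareId: 'demo-share-id', outLinkUid: 'demo-uid' },
    metadata: { chatId },
    percentListen: (percent) => console.log(`upload ${percent}%`)
  });
  return previewUrl;
}
```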

View File

@@ -1,3 +1,5 @@
+import { useSystemStore } from './useSystemStore';
+
 export const downloadFetch = async ({ url, filename }: { url: string; filename: string }) => {
   const a = document.createElement('a');
   a.href = url;
@@ -6,3 +8,8 @@ export const downloadFetch = async ({ url, filename }: { url: string; filename:
   a.click();
   document.body.removeChild(a);
 };
+
+export const getWebLLMModel = (model?: string) => {
+  const list = useSystemStore.getState().llmModelList;
+  return list.find((item) => item.model === model || item.name === model) ?? list[0];
+};
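
getWebLLMModel reads llmModelList from the store's current state rather than through a hook, so it works outside React render (plain utils, callbacks) and applies the same id-or-name matching as the server-side getLLMModel. A small usage sketch (the model value is a placeholder):

```ts
import { getWebLLMModel } from '@/web/common/system/utils';

// Accepts either a model id ('gpt-4o') or a configured display name ('GPT-4o');
// falls back to the first configured model when nothing matches.
const modelData = getWebLLMModel('GPT-4o');
if (modelData) {
  console.log(modelData.model, modelData.maxResponse);
}
```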

File diff suppressed because it is too large

View File

@@ -2,12 +2,11 @@ import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
 import { StoreNodeItemType } from '@fastgpt/global/core/workflow/type/node.d';
 import { useSystemStore } from '@/web/common/system/useSystemStore';
 import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
+import { getWebLLMModel } from '@/web/common/system/utils';
 
 export function checkChatSupportSelectFileByChatModels(models: string[] = []) {
-  const llmModelList = useSystemStore.getState().llmModelList;
-
   for (const model of models) {
-    const modelData = llmModelList.find((item) => item.model === model || item.name === model);
+    const modelData = getWebLLMModel(model);
     if (modelData?.vision) {
       return true;
     }
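
A usage sketch for the helper above; the import path and model ids are placeholders:

```ts
import { checkChatSupportSelectFileByChatModels } from '@/web/core/chat/utils'; // assumed path

// True when any referenced chat model (matched by id or display name) is
// vision-capable, which is what gates the file-upload entry in chat.
const canUploadFiles = checkChatSupportSelectFileByChatModels(['gpt-4o', 'gpt-3.5-turbo']);
console.log(canUploadFiles ? 'file upload enabled' : 'file upload hidden');
```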