Mirror of https://github.com/labring/FastGPT.git (synced 2025-08-01 03:48:24 +00:00)
4.8.9 test (#2299)
* perf: read file prompt
* perf: read file prompt
* perf: free plan tip
* feat: cron job usage
* perf: app templates
* perf: get llm model by name
* feat: support outlink upload file
* fix: upload limit
@@ -3,6 +3,7 @@ import { UploadImgProps } from '@fastgpt/global/common/file/api';
import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
import { preUploadImgProps } from '@fastgpt/global/common/file/api';
import { compressBase64Img, type CompressImgProps } from '@fastgpt/web/common/file/img';
import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';

/**
 * upload file to mongo gridfs
@@ -10,11 +11,13 @@ import { compressBase64Img, type CompressImgProps } from '@fastgpt/web/common/fi
export const uploadFile2DB = ({
  file,
  bucketName,
  outLinkAuthData,
  metadata = {},
  percentListen
}: {
  file: File;
  bucketName: `${BucketNameEnum}`;
  outLinkAuthData?: OutLinkChatAuthProps;
  metadata?: Record<string, any>;
  percentListen?: (percent: number) => void;
}) => {
@@ -22,6 +25,14 @@ export const uploadFile2DB = ({
  form.append('metadata', JSON.stringify(metadata));
  form.append('bucketName', bucketName);
  form.append('file', file, encodeURIComponent(file.name));

  if (outLinkAuthData) {
    for (const key in outLinkAuthData) {
      // @ts-ignore
      outLinkAuthData[key] && form.append(key, outLinkAuthData[key]);
    }
  }

  return postUploadFiles(form, (e) => {
    if (!e.total) return;
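The hunk above threads an optional outLinkAuthData object through uploadFile2DB and appends each non-empty field to the multipart form, which is what lets files be uploaded from share (outlink) chats. A minimal caller sketch, assuming the import path, the 'chat' bucket name, and the shareId/outLinkUid fields of OutLinkChatAuthProps (adjust if the real type differs):

import { uploadFile2DB } from '@/web/common/file/controller';

// Hedged sketch: upload a user-selected file from an outlink (share) chat.
// The shareId/outLinkUid fields and the 'chat' bucket name are assumptions here.
async function uploadFromShareChat(file: File, shareId: string, outLinkUid: string) {
  return uploadFile2DB({
    file,
    bucketName: 'chat',
    outLinkAuthData: { shareId, outLinkUid }, // only truthy values are appended to the form
    metadata: { source: 'outlink' }, // illustrative metadata
    percentListen: (percent) => console.log(`upload progress: ${percent}%`)
  });
}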
@@ -1,3 +1,5 @@
import { useSystemStore } from './useSystemStore';

export const downloadFetch = async ({ url, filename }: { url: string; filename: string }) => {
  const a = document.createElement('a');
  a.href = url;
@@ -6,3 +8,8 @@ export const downloadFetch = async ({ url, filename }: { url: string; filename:
  a.click();
  document.body.removeChild(a);
};

export const getWebLLMModel = (model?: string) => {
  const list = useSystemStore.getState().llmModelList;
  return list.find((item) => item.model === model || item.name === model) ?? list[0];
};
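getWebLLMModel resolves an LLM config entry by its model id or display name and falls back to the first configured model, which keeps lookups consistent across the web app. A short usage sketch (the model id shown is illustrative):

import { getWebLLMModel } from '@/web/common/system/utils';

// Either the model id or the display name from the system config resolves the entry;
// an unknown value falls back to the first item of llmModelList.
const model = getWebLLMModel('gpt-4o-mini'); // illustrative id
console.log(model?.model, model?.name);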
File diff suppressed because it is too large
@@ -2,12 +2,11 @@ import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
import { StoreNodeItemType } from '@fastgpt/global/core/workflow/type/node.d';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import { getWebLLMModel } from '@/web/common/system/utils';

export function checkChatSupportSelectFileByChatModels(models: string[] = []) {
  const llmModelList = useSystemStore.getState().llmModelList;

  for (const model of models) {
    const modelData = llmModelList.find((item) => item.model === model || item.name === model);
    const modelData = getWebLLMModel(model);
    if (modelData?.vision) {
      return true;
    }
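In this hunk the inline llmModelList.find(...) lookup is replaced by the shared getWebLLMModel helper, so vision support is checked against a model resolved by id or name. A hedged reading of the post-change function (the hunk above mixes removed and added lines, so this is a sketch, not the verbatim result):

import { getWebLLMModel } from '@/web/common/system/utils';

// Sketch: returns true if any selected chat model supports image (vision) input.
export function checkChatSupportSelectFileByChatModels(models: string[] = []) {
  for (const model of models) {
    const modelData = getWebLLMModel(model); // lookup by model id or display name
    if (modelData?.vision) {
      return true;
    }
  }
  return false;
}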