Mirror of https://github.com/labring/FastGPT.git
perf: http empty params (#3773)
* model config
* feat: normalization embedding
* perf: http empty params
* doc
@@ -13,9 +13,10 @@ weight: 804
 
 1. New - Warning for deprecated/deleted plugins.
 2. New - Chat logs can be filtered by source, searched by title, and exported.
-3. New - LLM models support the top_p, response_format, and json_schema parameters.
-4. New - Doubao1.5 model presets.
-5. New - Embedding models support a normalization option, to adapt embedding models whose vectors are not normalized, such as Doubao's embedding model.
+3. New - Global variables support drag-and-drop sorting.
+4. New - LLM models support the top_p, response_format, and json_schema parameters.
+5. New - Doubao1.5 model presets. Alibaba embedding3 presets.
+6. New - Embedding models support a normalization option, to adapt embedding models whose vectors are not normalized, such as Doubao's embedding model.
 6. New - The AI chat node can output its reasoning content for other nodes to reference.
 7. Improved - Error message when no model is configured.
 8. Improved - Support for reasoning output in non-stream mode.
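For context on the embedding normalization entry above, here is a minimal sketch of what normalizing a vector means, assuming the standard L2 normalization; it is an illustration only, not FastGPT's actual implementation, and the helper name normalizeEmbedding is made up.

// L2-normalize an embedding so its length is 1; cosine similarity over
// normalized vectors then reduces to a plain dot product.
const normalizeEmbedding = (vector: number[]): number[] => {
  const norm = Math.sqrt(vector.reduce((sum, x) => sum + x * x, 0));
  return norm === 0 ? vector : vector.map((x) => x / norm);
};

normalizeEmbedding([3, 4]); // [0.6, 0.8]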
@@ -26,7 +26,7 @@ export const simpleText = (text = '') => {
 };
 
 export const valToStr = (val: any) => {
-  if (val === undefined) return 'undefined';
+  if (val === undefined) return '';
   if (val === null) return 'null';
 
   if (typeof val === 'object') return JSON.stringify(val);
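The behavioural change in this hunk: an undefined value now stringifies to an empty string instead of the literal text 'undefined', which is what keeps empty HTTP params from rendering as "undefined". A standalone restatement for illustration; the final String(val) fallback is assumed, since it lies outside the hunk.

const valToStr = (val: any): string => {
  if (val === undefined) return ''; // was: return 'undefined'
  if (val === null) return 'null';
  if (typeof val === 'object') return JSON.stringify(val);
  return String(val); // assumed fallback, outside the diff context
};

valToStr(undefined); // ''       — empty inputs no longer become the text "undefined"
valToStr({ a: 1 });  // '{"a":1}'
valToStr(42);        // '42'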
@@ -292,13 +292,12 @@ export const getReferenceVariableValue = ({
 
 export const formatVariableValByType = (val: any, valueType?: WorkflowIOValueTypeEnum) => {
   if (!valueType) return val;
+  if (val === undefined || val === null) return;
   // Value type check, If valueType invalid, return undefined
   if (valueType.startsWith('array') && !Array.isArray(val)) return undefined;
   if (valueType === WorkflowIOValueTypeEnum.boolean) return Boolean(val);
   if (valueType === WorkflowIOValueTypeEnum.number) return Number(val);
   if (valueType === WorkflowIOValueTypeEnum.string) {
-    if (val === undefined) return 'undefined';
-    if (val === null) return 'null';
     return typeof val === 'object' ? JSON.stringify(val) : String(val);
   }
   if (
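The net effect of this hunk is that undefined and null now short-circuit before any type coercion, so they can no longer turn into false, NaN, or the strings 'undefined'/'null'. A self-contained sketch of the new behaviour, with the WorkflowIOValueTypeEnum values replaced by string literals and the branches after the final if ( collapsed into a plain return:

const formatVariableValByType = (val: any, valueType?: string) => {
  if (!valueType) return val;
  if (val === undefined || val === null) return; // new early return
  if (valueType.startsWith('array') && !Array.isArray(val)) return undefined;
  if (valueType === 'boolean') return Boolean(val);
  if (valueType === 'number') return Number(val);
  if (valueType === 'string') {
    return typeof val === 'object' ? JSON.stringify(val) : String(val);
  }
  return val; // remaining branches omitted from this sketch
};

formatVariableValByType(undefined, 'string'); // undefined (previously the text 'undefined')
formatVariableValByType(null, 'boolean');     // undefined (previously false)
formatVariableValByType('1', 'number');       // 1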
@@ -245,6 +245,13 @@
       "showTopP": true,
       "showStopSign": true,
       "responseFormatList": ["text", "json_object"]
+    },
+    {
+      "model": "text-embedding-v3",
+      "name": "text-embedding-v3",
+      "defaultToken": 512,
+      "maxToken": 8000,
+      "type": "embedding"
     }
   ]
 }
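The added JSON entry registers the Alibaba text-embedding-v3 preset mentioned in the changelog. For readers scanning the config, a hypothetical TypeScript shape for such an entry; only the field names and values come from the diff, and the type name EmbeddingModelPreset is illustrative:

type EmbeddingModelPreset = {
  model: string;        // provider model id
  name: string;         // display name
  defaultToken: number;
  maxToken: number;
  type: 'embedding';
};

// The entry added above, expressed as a typed object.
const textEmbeddingV3: EmbeddingModelPreset = {
  model: 'text-embedding-v3',
  name: 'text-embedding-v3',
  defaultToken: 512,
  maxToken: 8000,
  type: 'embedding'
};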
@@ -51,7 +51,7 @@ import { ModelTypeEnum } from '@fastgpt/global/core/ai/model';
 
 export type ChatProps = ModuleDispatchProps<
   AIChatNodeProps & {
-    [NodeInputKeyEnum.userChatInput]: string;
+    [NodeInputKeyEnum.userChatInput]?: string;
     [NodeInputKeyEnum.history]?: ChatItemType[] | number;
     [NodeInputKeyEnum.aiChatDatasetQuote]?: SearchDataResponseItemType[];
   }
@@ -81,7 +81,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
     maxToken,
     history = 6,
     quoteQA,
-    userChatInput,
+    userChatInput = '',
     isResponseAnswerText = true,
     systemPrompt = '',
     aiChatQuoteRole = 'system',
@@ -23,10 +23,10 @@ type DatasetSearchProps = ModuleDispatchProps<{
   [NodeInputKeyEnum.datasetSimilarity]: number;
   [NodeInputKeyEnum.datasetMaxTokens]: number;
   [NodeInputKeyEnum.datasetSearchMode]: `${DatasetSearchModeEnum}`;
-  [NodeInputKeyEnum.userChatInput]: string;
+  [NodeInputKeyEnum.userChatInput]?: string;
   [NodeInputKeyEnum.datasetSearchUsingReRank]: boolean;
   [NodeInputKeyEnum.collectionFilterMatch]: string;
-  [NodeInputKeyEnum.authTmbId]: boolean;
+  [NodeInputKeyEnum.authTmbId]?: boolean;
 
   [NodeInputKeyEnum.datasetSearchUsingExtensionQuery]: boolean;
   [NodeInputKeyEnum.datasetSearchExtensionModel]: string;
@@ -55,7 +55,7 @@ export async function dispatchDatasetSearch(
     limit = 1500,
     usingReRank,
     searchMode,
-    userChatInput,
+    userChatInput = '',
     authTmbId = false,
     collectionFilterMatch,
 
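As in the chat node above, userChatInput gains a '' default here. A pattern sketch of why that matters, assuming the surrounding code calls string methods on the query; this is not the actual dispatchDatasetSearch body, and runSearch is a placeholder:

type SearchResult = { searchRes: unknown[]; total: number };

const searchIfQueryPresent = async (
  runSearch: (query: string) => Promise<SearchResult>,
  userChatInput = '' // default keeps .trim() from throwing on undefined
): Promise<SearchResult> => {
  if (!userChatInput.trim()) {
    return { searchRes: [], total: 0 }; // nothing to search for
  }
  return runSearch(userChatInput);
};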
@@ -38,10 +38,10 @@ type HttpRequestProps = ModuleDispatchProps<{
   [NodeInputKeyEnum.abandon_httpUrl]: string;
   [NodeInputKeyEnum.httpMethod]: string;
   [NodeInputKeyEnum.httpReqUrl]: string;
-  [NodeInputKeyEnum.httpHeaders]: PropsArrType[];
-  [NodeInputKeyEnum.httpParams]: PropsArrType[];
-  [NodeInputKeyEnum.httpJsonBody]: string;
-  [NodeInputKeyEnum.httpFormBody]: PropsArrType[];
+  [NodeInputKeyEnum.httpHeaders]?: PropsArrType[];
+  [NodeInputKeyEnum.httpParams]?: PropsArrType[];
+  [NodeInputKeyEnum.httpJsonBody]?: string;
+  [NodeInputKeyEnum.httpFormBody]?: PropsArrType[];
   [NodeInputKeyEnum.httpContentType]: ContentTypes;
   [NodeInputKeyEnum.addInputParam]: Record<string, any>;
   [NodeInputKeyEnum.httpTimeout]?: number;
@@ -76,10 +76,10 @@ export const dispatchHttp468Request = async (props: HttpRequestProps): Promise<H
     params: {
       system_httpMethod: httpMethod = 'POST',
       system_httpReqUrl: httpReqUrl,
-      system_httpHeader: httpHeader,
+      system_httpHeader: httpHeader = [],
       system_httpParams: httpParams = [],
-      system_httpJsonBody: httpJsonBody,
-      system_httpFormBody: httpFormBody,
+      system_httpJsonBody: httpJsonBody = '',
+      system_httpFormBody: httpFormBody = [],
       system_httpContentType: httpContentType = ContentTypes.json,
       system_httpTimeout: httpTimeout = 60,
       [NodeInputKeyEnum.addInputParam]: dynamicInput,
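These defaults are the core of the "http empty params" fix: header, params, JSON body and form body inputs may now arrive undefined (they became optional in the type above), and defaulting them to [] / '' lets the request be assembled without scattering undefined checks. A minimal sketch of that idea, assuming PropsArrType carries key/value fields; it is not the actual dispatchHttp468Request logic:

type PropsArrType = { key: string; type: string; value: string };

const buildRequestParts = (
  httpHeader: PropsArrType[] = [],
  httpParams: PropsArrType[] = [],
  httpJsonBody = ''
) => {
  // Empty arrays/strings flow through safely: no optional chaining needed.
  const headers = Object.fromEntries(httpHeader.map((h) => [h.key, h.value]));
  const query = new URLSearchParams(httpParams.map((p) => [p.key, p.value])).toString();
  return { headers, query, body: httpJsonBody || undefined };
};

// With every HTTP input left empty, this still yields a usable request shape:
buildRequestParts(); // { headers: {}, query: '', body: undefined }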
@@ -34,7 +34,6 @@ import LightRowTabs from '@fastgpt/web/components/common/Tabs/LightRowTabs';
 import MyIcon from '@fastgpt/web/components/common/Icon';
 import { FlowNodeInputItemType } from '@fastgpt/global/core/workflow/type/io.d';
 import { useToast } from '@fastgpt/web/hooks/useToast';
-import JSONEditor from '@fastgpt/web/components/common/Textarea/JsonEditor';
 import { EditorVariableLabelPickerType } from '@fastgpt/web/components/common/Textarea/PromptEditor/type';
 import HttpInput from '@fastgpt/web/components/common/Input/HttpInput';
 import dynamic from 'next/dynamic';
@@ -81,7 +81,7 @@ const Login = ({ ChineseRedirectUrl }: { ChineseRedirectUrl: string }) => {
         router.push(navigateTo);
       }, 300);
     },
-    [lastRoute, router, setUserInfo, llmModelList]
+    [setUserInfo, llmModelList?.length, lastRoute, toast, t, router]
   );
 
   const DynamicComponent = useMemo(() => {