Mirror of https://github.com/labring/FastGPT.git (synced 2025-07-22 20:37:48 +00:00)
perf: http empty params (#3773)
* model config
* feat: normalization embedding
* perf: http empty params
* doc
@@ -245,6 +245,13 @@
       "showTopP": true,
       "showStopSign": true,
       "responseFormatList": ["text", "json_object"]
     },
+    {
+      "model": "text-embedding-v3",
+      "name": "text-embedding-v3",
+      "defaultToken": 512,
+      "maxToken": 8000,
+      "type": "embedding"
+    }
   ]
 }
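The new text-embedding-v3 entry pairs with the "feat: normalization embedding" item in the commit message. For reference, L2-normalizing an embedding vector looks roughly like this (a hypothetical helper for illustration, not FastGPT's actual code):

    // Hypothetical helper: scale a vector to unit length so cosine similarity
    // reduces to a plain dot product. Not FastGPT's actual implementation.
    export const normalizeEmbedding = (vector: number[]): number[] => {
      const norm = Math.sqrt(vector.reduce((sum, v) => sum + v * v, 0));
      return norm === 0 ? vector : vector.map((v) => v / norm);
    };

    // Example: normalizeEmbedding([3, 4]) -> [0.6, 0.8]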
@@ -51,7 +51,7 @@ import { ModelTypeEnum } from '@fastgpt/global/core/ai/model';
 
 export type ChatProps = ModuleDispatchProps<
   AIChatNodeProps & {
-    [NodeInputKeyEnum.userChatInput]: string;
+    [NodeInputKeyEnum.userChatInput]?: string;
     [NodeInputKeyEnum.history]?: ChatItemType[] | number;
     [NodeInputKeyEnum.aiChatDatasetQuote]?: SearchDataResponseItemType[];
   }
@@ -81,7 +81,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
     maxToken,
     history = 6,
     quoteQA,
-    userChatInput,
+    userChatInput = '',
     isResponseAnswerText = true,
     systemPrompt = '',
     aiChatQuoteRole = 'system',
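Making userChatInput optional and defaulting it to '' in the destructure means a flow that reaches the AI chat node without a user message hands the node an empty string instead of undefined. A rough illustration of the difference, with simplified types rather than the real dispatcher:

    // Simplified stand-in for the chat node's input handling (illustrative only).
    type ChatParams = { userChatInput?: string };

    const hasUserMessage = ({ userChatInput = '' }: ChatParams) => {
      // Without the default, calling .trim() on undefined would throw at runtime.
      return userChatInput.trim().length > 0;
    };

    hasUserMessage({});                      // false, no runtime error
    hasUserMessage({ userChatInput: 'hi' }); // true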
@@ -23,10 +23,10 @@ type DatasetSearchProps = ModuleDispatchProps<{
   [NodeInputKeyEnum.datasetSimilarity]: number;
   [NodeInputKeyEnum.datasetMaxTokens]: number;
   [NodeInputKeyEnum.datasetSearchMode]: `${DatasetSearchModeEnum}`;
-  [NodeInputKeyEnum.userChatInput]: string;
+  [NodeInputKeyEnum.userChatInput]?: string;
   [NodeInputKeyEnum.datasetSearchUsingReRank]: boolean;
   [NodeInputKeyEnum.collectionFilterMatch]: string;
-  [NodeInputKeyEnum.authTmbId]: boolean;
+  [NodeInputKeyEnum.authTmbId]?: boolean;
 
   [NodeInputKeyEnum.datasetSearchUsingExtensionQuery]: boolean;
   [NodeInputKeyEnum.datasetSearchExtensionModel]: string;
@@ -55,7 +55,7 @@ export async function dispatchDatasetSearch(
     limit = 1500,
     usingReRank,
     searchMode,
-    userChatInput,
+    userChatInput = '',
     authTmbId = false,
     collectionFilterMatch,
 
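The dataset search node gets the same treatment for userChatInput and authTmbId. A sketch of how an empty query might be short-circuited before it reaches the vector store (illustrative only, not the actual dispatchDatasetSearch body):

    // Illustrative guard: an empty query yields an empty result set instead of
    // being forwarded to the vector search. Types are simplified stand-ins.
    type SearchParams = { userChatInput?: string; authTmbId?: boolean };

    const searchDataset = async ({ userChatInput = '', authTmbId = false }: SearchParams) => {
      if (!userChatInput.trim()) {
        return { searchRes: [] as { q: string }[], usingAuth: authTmbId };
      }
      // ...the real implementation would query the dataset here
      return { searchRes: [{ q: userChatInput }], usingAuth: authTmbId };
    };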
@@ -38,10 +38,10 @@ type HttpRequestProps = ModuleDispatchProps<{
   [NodeInputKeyEnum.abandon_httpUrl]: string;
   [NodeInputKeyEnum.httpMethod]: string;
   [NodeInputKeyEnum.httpReqUrl]: string;
-  [NodeInputKeyEnum.httpHeaders]: PropsArrType[];
-  [NodeInputKeyEnum.httpParams]: PropsArrType[];
-  [NodeInputKeyEnum.httpJsonBody]: string;
-  [NodeInputKeyEnum.httpFormBody]: PropsArrType[];
+  [NodeInputKeyEnum.httpHeaders]?: PropsArrType[];
+  [NodeInputKeyEnum.httpParams]?: PropsArrType[];
+  [NodeInputKeyEnum.httpJsonBody]?: string;
+  [NodeInputKeyEnum.httpFormBody]?: PropsArrType[];
   [NodeInputKeyEnum.httpContentType]: ContentTypes;
   [NodeInputKeyEnum.addInputParam]: Record<string, any>;
   [NodeInputKeyEnum.httpTimeout]?: number;
@@ -76,10 +76,10 @@ export const dispatchHttp468Request = async (props: HttpRequestProps): Promise<H
   params: {
     system_httpMethod: httpMethod = 'POST',
     system_httpReqUrl: httpReqUrl,
-    system_httpHeader: httpHeader,
+    system_httpHeader: httpHeader = [],
     system_httpParams: httpParams = [],
-    system_httpJsonBody: httpJsonBody,
-    system_httpFormBody: httpFormBody,
+    system_httpJsonBody: httpJsonBody = '',
+    system_httpFormBody: httpFormBody = [],
     system_httpContentType: httpContentType = ContentTypes.json,
     system_httpTimeout: httpTimeout = 60,
     [NodeInputKeyEnum.addInputParam]: dynamicInput,
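With httpHeader, httpJsonBody, and httpFormBody defaulted to [] and '', the request builder never receives undefined when an HTTP node input is left empty. A rough sketch of why that matters, using simplified types rather than the real dispatcher:

    // Illustrative only: building headers/params/body from possibly-empty node inputs.
    type PropsArrType = { key: string; value: string };

    const buildRequestParts = (
      httpHeader: PropsArrType[] = [],
      httpParams: PropsArrType[] = [],
      httpJsonBody = ''
    ) => {
      // An empty array maps safely to an empty object; undefined.map would throw.
      const headers = Object.fromEntries(httpHeader.map((h) => [h.key, h.value]));
      const params = Object.fromEntries(httpParams.map((p) => [p.key, p.value]));
      // An empty string can be checked before JSON.parse; undefined.trim would throw.
      const body = httpJsonBody.trim() ? JSON.parse(httpJsonBody) : undefined;
      return { headers, params, body };
    };

    buildRequestParts(); // { headers: {}, params: {}, body: undefined }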