Mirror of https://github.com/labring/FastGPT.git, synced 2025-07-23 21:13:50 +00:00
feat: model select support close params; perf: dataset params slider; update doc (#3453)
* feat: model select support close params
* perf: dataset params slider
* update doc
* update doc
* add delete log
* perf: ai config overflow
* test
* test
* test
* delete collection tags
* delete collection tags
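The core change, repeated in every hunk below, is that the destructuring defaults temperature = 0 and maxToken = 4000 are removed, so a model whose parameters are closed in the model select no longer has those values silently forced on it. A minimal TypeScript sketch of the behavioral difference (hypothetical names, not the FastGPT source):

type ChatParams = { temperature?: number; maxToken?: number };

// Old pattern: destructuring defaults turn missing (closed) params into 0 / 4000.
const withDefaults = ({ temperature = 0, maxToken = 4000 }: ChatParams) => ({
  temperature,
  maxToken
});

// New pattern: missing params stay undefined, so later code can tell that the
// user closed them and avoid forcing a value on the model.
const withoutDefaults = ({ temperature, maxToken }: ChatParams) => ({
  temperature,
  maxToken
});

withDefaults({});    // { temperature: 0, maxToken: 4000 }
withoutDefaults({}); // { temperature: undefined, maxToken: undefined }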
@@ -46,7 +46,7 @@ export const runToolWithFunctionCall = async (
     user,
     stream,
     workflowStreamResponse,
-    params: { temperature = 0, maxToken = 4000, aiChatVision }
+    params: { temperature, maxToken, aiChatVision }
   } = workflowProps;

   // Interactive
@@ -54,7 +54,7 @@ export const runToolWithPromptCall = async (
     user,
     stream,
     workflowStreamResponse,
-    params: { temperature = 0, maxToken = 4000, aiChatVision }
+    params: { temperature, maxToken, aiChatVision }
   } = workflowProps;

   if (interactiveEntryToolParams) {
@@ -94,7 +94,7 @@ export const runToolWithToolChoice = async (
     stream,
     user,
     workflowStreamResponse,
-    params: { temperature = 0, maxToken = 4000, aiChatVision }
+    params: { temperature, maxToken, aiChatVision }
   } = workflowProps;

   if (maxRunToolTimes <= 0 && response) {
@@ -71,8 +71,8 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
     chatConfig,
     params: {
       model,
-      temperature = 0,
-      maxToken = 4000,
+      temperature,
+      maxToken,
       history = 6,
       quoteQA,
       userChatInput,
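These hunks only change how the params object is destructured; the point is what becomes possible downstream. When temperature and maxToken stay undefined, the request sent to the model provider can simply omit them instead of carrying hard-coded defaults. The diff does not include FastGPT's request-building code, so the following is only a sketch with a hypothetical buildCompletionBody helper:

interface CompletionBody {
  model: string;
  messages: { role: string; content: string }[];
  temperature?: number;
  max_tokens?: number;
}

// Hypothetical helper: fields for closed (undefined) params are dropped, so the
// provider falls back to its own defaults.
const buildCompletionBody = (
  model: string,
  messages: { role: string; content: string }[],
  temperature?: number,
  maxToken?: number
): CompletionBody => ({
  model,
  messages,
  ...(temperature !== undefined ? { temperature } : {}),
  ...(maxToken !== undefined ? { max_tokens: maxToken } : {})
});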