Mirror of https://github.com/labring/FastGPT.git, synced 2025-07-23 05:12:39 +00:00
feat: model select support close params;perf: dataset params slider;update doc (#3453)
* feat: model select support close params
* perf: dataset params slider
* update doc
* update doc
* add delete log
* perf: ai config overflow
* test
* test
* test
* delete collection tags
* delete collection tags
@@ -12,10 +12,12 @@ export const computedMaxToken = async ({
   model,
   filterMessages = []
 }: {
-  maxToken: number;
+  maxToken?: number;
   model: LLMModelItemType;
   filterMessages: ChatCompletionMessageParam[];
 }) => {
+  if (maxToken === undefined) return;
+
   maxToken = Math.min(maxToken, model.maxResponse);
   const tokensLimit = model.maxContext;
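This first hunk makes `maxToken` optional and returns early when it is undefined, which is what lets the new "close params" option from the commit title omit the max-token setting entirely. A minimal sketch of that guard in isolation; the simplified `LLMModelItemType` shape and the `clampMaxToken` helper name are assumptions for illustration, not FastGPT's actual code:

// Hypothetical, simplified model shape for illustration only.
type LLMModelItemType = { maxResponse: number; maxContext: number };

// Sketch of the patched behavior: when the max-token param is "closed"
// (undefined), nothing is clamped and the request omits the limit.
const clampMaxToken = (maxToken: number | undefined, model: LLMModelItemType) => {
  if (maxToken === undefined) return undefined;
  return Math.min(maxToken, model.maxResponse);
};

const model: LLMModelItemType = { maxResponse: 4096, maxContext: 128000 };
clampMaxToken(8000, model);      // 4096: clamped to the model's max response
clampMaxToken(undefined, model); // undefined: param closed, provider default applies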
@@ -63,12 +65,13 @@ export const llmCompletionsBodyFormat = <T extends CompletionsBodyType>(

   const requestBody: T = {
     ...body,
-    temperature: body.temperature
-      ? computedTemperature({
-          model: modelData,
-          temperature: body.temperature
-        })
-      : undefined,
+    temperature:
+      typeof body.temperature === 'number'
+        ? computedTemperature({
+            model: modelData,
+            temperature: body.temperature
+          })
+        : undefined,
     ...modelData?.defaultConfig
   };
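The second hunk swaps the truthy check on `body.temperature` for a `typeof` check, so an explicit temperature of 0 is kept instead of being treated as "unset". A small sketch of the difference; the `pickTemperature*` names are illustrative, not FastGPT helpers:

// Illustrative only: the old truthy check drops a temperature of 0.
const pickTemperatureOld = (t?: number) => (t ? t : undefined);

// The patched check only skips values that are not numbers.
const pickTemperatureNew = (t?: number) => (typeof t === 'number' ? t : undefined);

pickTemperatureOld(0);         // undefined: 0 was silently ignored before the patch
pickTemperatureNew(0);         // 0: deterministic sampling is preserved
pickTemperatureNew(undefined); // undefined: param closed, provider default is used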