mirror of
https://github.com/labring/FastGPT.git
synced 2025-07-23 13:03:50 +00:00
perf: tool value type and completions body size (#1291)
This commit is contained in:
@@ -56,7 +56,7 @@ export const runToolWithFunctionCall = async (
|
||||
> = {};
|
||||
item.toolParams.forEach((item) => {
|
||||
properties[item.key] = {
|
||||
type: 'string',
|
||||
type: item.valueType || 'string',
|
||||
description: item.toolDescription || ''
|
||||
};
|
||||
});
|
||||
@@ -76,6 +76,18 @@ export const runToolWithFunctionCall = async (
|
||||
messages,
|
||||
maxTokens: toolModel.maxContext - 500 // filter token. not response maxToken
|
||||
});
|
||||
const formativeMessages = filterMessages.map((item) => {
|
||||
if (item.role === ChatCompletionRequestMessageRoleEnum.Assistant && item.function_call) {
|
||||
return {
|
||||
...item,
|
||||
function_call: {
|
||||
name: item.function_call?.name,
|
||||
arguments: item.function_call?.arguments
|
||||
}
|
||||
};
|
||||
}
|
||||
return item;
|
||||
});
|
||||
|
||||
/* Run llm */
|
||||
const ai = getAIApi({
|
||||
@@ -87,7 +99,7 @@ export const runToolWithFunctionCall = async (
|
||||
model: toolModel.model,
|
||||
temperature: 0,
|
||||
stream,
|
||||
messages: filterMessages,
|
||||
messages: formativeMessages,
|
||||
functions,
|
||||
function_call: 'auto'
|
||||
},
|
||||
|
@@ -63,7 +63,7 @@ export const runToolWithToolChoice = async (
|
||||
> = {};
|
||||
item.toolParams.forEach((item) => {
|
||||
properties[item.key] = {
|
||||
type: 'string',
|
||||
type: item.valueType || 'string',
|
||||
description: item.toolDescription || ''
|
||||
};
|
||||
});
|
||||
@@ -86,7 +86,34 @@ export const runToolWithToolChoice = async (
|
||||
messages,
|
||||
maxTokens: toolModel.maxContext - 300 // filter token. not response maxToken
|
||||
});
|
||||
|
||||
const formativeMessages = filterMessages.map((item) => {
|
||||
if (item.role === 'assistant' && item.tool_calls) {
|
||||
return {
|
||||
...item,
|
||||
tool_calls: item.tool_calls.map((tool) => ({
|
||||
id: tool.id,
|
||||
type: tool.type,
|
||||
function: tool.function
|
||||
}))
|
||||
};
|
||||
}
|
||||
return item;
|
||||
});
|
||||
// console.log(
|
||||
// JSON.stringify(
|
||||
// {
|
||||
// ...toolModel?.defaultConfig,
|
||||
// model: toolModel.model,
|
||||
// temperature: 0,
|
||||
// stream,
|
||||
// messages: formativeMessages,
|
||||
// tools,
|
||||
// tool_choice: 'auto'
|
||||
// },
|
||||
// null,
|
||||
// 2
|
||||
// )
|
||||
// );
|
||||
/* Run llm */
|
||||
const ai = getAIApi({
|
||||
timeout: 480000
|
||||
@@ -97,7 +124,7 @@ export const runToolWithToolChoice = async (
|
||||
model: toolModel.model,
|
||||
temperature: 0,
|
||||
stream,
|
||||
messages: filterMessages,
|
||||
messages: formativeMessages,
|
||||
tools,
|
||||
tool_choice: 'auto'
|
||||
},
|
||||
|
@@ -149,7 +149,8 @@ export const dispatchHttp468Request = async (props: HttpRequestProps): Promise<H
|
||||
headers: Object.keys(headers).length > 0 ? headers : undefined,
|
||||
httpResult: rawResponse
|
||||
},
|
||||
[DispatchNodeResponseKeyEnum.toolResponses]: results,
|
||||
[DispatchNodeResponseKeyEnum.toolResponses]:
|
||||
Object.keys(results).length > 0 ? results : rawResponse,
|
||||
[NodeOutputKeyEnum.httpRawResponse]: rawResponse,
|
||||
...results
|
||||
};
|
||||
|
Reference in New Issue
Block a user