perf: tool value type and completions body size (#1291)

This commit is contained in:
Archer
2024-04-26 10:54:39 +08:00
committed by GitHub
parent c608f86146
commit 89ab17ea2e
4 changed files with 47 additions and 6 deletions

View File

@@ -56,7 +56,7 @@ export const runToolWithFunctionCall = async (
> = {};
item.toolParams.forEach((item) => {
properties[item.key] = {
type: 'string',
type: item.valueType || 'string',
description: item.toolDescription || ''
};
});
@@ -76,6 +76,18 @@ export const runToolWithFunctionCall = async (
messages,
maxTokens: toolModel.maxContext - 500 // filter token. not response maxToken
});
const formativeMessages = filterMessages.map((item) => {
if (item.role === ChatCompletionRequestMessageRoleEnum.Assistant && item.function_call) {
return {
...item,
function_call: {
name: item.function_call?.name,
arguments: item.function_call?.arguments
}
};
}
return item;
});
/* Run llm */
const ai = getAIApi({
@@ -87,7 +99,7 @@ export const runToolWithFunctionCall = async (
model: toolModel.model,
temperature: 0,
stream,
messages: filterMessages,
messages: formativeMessages,
functions,
function_call: 'auto'
},

View File

@@ -63,7 +63,7 @@ export const runToolWithToolChoice = async (
> = {};
item.toolParams.forEach((item) => {
properties[item.key] = {
type: 'string',
type: item.valueType || 'string',
description: item.toolDescription || ''
};
});
@@ -86,7 +86,34 @@ export const runToolWithToolChoice = async (
messages,
maxTokens: toolModel.maxContext - 300 // filter token. not response maxToken
});
const formativeMessages = filterMessages.map((item) => {
if (item.role === 'assistant' && item.tool_calls) {
return {
...item,
tool_calls: item.tool_calls.map((tool) => ({
id: tool.id,
type: tool.type,
function: tool.function
}))
};
}
return item;
});
// console.log(
// JSON.stringify(
// {
// ...toolModel?.defaultConfig,
// model: toolModel.model,
// temperature: 0,
// stream,
// messages: formativeMessages,
// tools,
// tool_choice: 'auto'
// },
// null,
// 2
// )
// );
/* Run llm */
const ai = getAIApi({
timeout: 480000
@@ -97,7 +124,7 @@ export const runToolWithToolChoice = async (
model: toolModel.model,
temperature: 0,
stream,
messages: filterMessages,
messages: formativeMessages,
tools,
tool_choice: 'auto'
},

View File

@@ -149,7 +149,8 @@ export const dispatchHttp468Request = async (props: HttpRequestProps): Promise<H
headers: Object.keys(headers).length > 0 ? headers : undefined,
httpResult: rawResponse
},
[DispatchNodeResponseKeyEnum.toolResponses]: results,
[DispatchNodeResponseKeyEnum.toolResponses]:
Object.keys(results).length > 0 ? results : rawResponse,
[NodeOutputKeyEnum.httpRawResponse]: rawResponse,
...results
};

View File

@@ -28,6 +28,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
export const config = {
api: {
sizeLimit: '10mb',
bodyParser: {
sizeLimit: '16mb'
}