Mirror of https://github.com/labring/FastGPT.git (synced 2025-07-22 12:20:34 +00:00)
Perf system plugin and worker (#2126)
* perf: worker pool
* perf: worker register
* perf: worker controller
* perf: system plugin worker
* perf: system plugin worker
* perf: worker
* perf: worker
* worker timeout
* perf: copy icon
@@ -441,11 +441,18 @@ export async function searchDatasetData(props: SearchDatasetDataProps) {
 
   // token filter
   const filterMaxTokensResult = await (async () => {
+    const tokensScoreFilter = await Promise.all(
+      scoreFilter.map(async (item) => ({
+        ...item,
+        tokens: await countPromptTokens(item.q + item.a)
+      }))
+    );
+
     const results: SearchDataResponseItemType[] = [];
     let totalTokens = 0;
 
-    for await (const item of scoreFilter) {
-      totalTokens += await countPromptTokens(item.q + item.a);
+    for await (const item of tokensScoreFilter) {
+      totalTokens += item.tokens;
 
       if (totalTokens > maxTokens + 500) {
         break;
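Note on the hunk above: the old loop awaited countPromptTokens once per candidate inside the filter loop, while the new code pre-computes every count with a single Promise.all and then trims the list synchronously against the token budget. A minimal standalone sketch of that pattern in TypeScript (Item, countTokens and filterByTokenBudget are illustrative names only; the real implementation uses countPromptTokens and SearchDataResponseItemType):

  // A minimal sketch, outside FastGPT, of the pattern used above.
  type Item = { q: string; a: string };

  // Hypothetical stand-in for countPromptTokens: roughly 4 characters per token.
  async function countTokens(text: string): Promise<number> {
    return Math.ceil(text.length / 4);
  }

  async function filterByTokenBudget<T extends Item>(
    items: T[],
    maxTokens: number
  ): Promise<T[]> {
    // 1. Count tokens for every candidate concurrently instead of awaiting inside the loop.
    const withTokens = await Promise.all(
      items.map(async (item) => ({
        ...item,
        tokens: await countTokens(item.q + item.a)
      }))
    );

    // 2. Accumulate synchronously and stop once the budget (plus a little slack) is exceeded.
    const results: T[] = [];
    let totalTokens = 0;
    for (const item of withTokens) {
      totalTokens += item.tokens;
      if (totalTokens > maxTokens + 500) break;
      results.push(item);
    }
    return results;
  }

Counting in parallel may compute a few counts for items that end up past the cutoff, but it removes the serial await chain from the filter loop.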
@@ -122,7 +122,6 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
   let chatAssistantResponse: AIChatItemValueItemType[] = []; // The value will be returned to the user
   let chatNodeUsages: ChatNodeUsageType[] = [];
   let toolRunResponse: ToolRunResponseItemType;
-  let runningTime = Date.now();
   let debugNextStepRunNodes: RuntimeNodeItemType[] = [];
 
   /* Store special response field */
@@ -142,13 +141,8 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
       [DispatchNodeResponseKeyEnum.assistantResponses]?: AIChatItemValueItemType[]; // tool module, save the response value
     }
   ) {
-    const time = Date.now();
-
     if (responseData) {
-      chatResponses.push({
-        ...responseData,
-        runningTime: +((time - runningTime) / 1000).toFixed(2)
-      });
+      chatResponses.push(responseData);
     }
     if (nodeDispatchUsages) {
       chatNodeUsages = chatNodeUsages.concat(nodeDispatchUsages);
@@ -175,8 +169,6 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
         });
       }
     }
-
-    runningTime = time;
   }
   /* Pass the output of the module to the next stage */
   function nodeOutput(
@@ -328,6 +320,7 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
           status: 'running'
         });
       }
+      const startTime = Date.now();
 
       // get node running params
       const params = getNodeRunParams(node);
@@ -362,6 +355,7 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
           nodeId: node.nodeId,
           moduleName: node.name,
           moduleType: node.flowNodeType,
+          runningTime: +((Date.now() - startTime) / 1000).toFixed(2),
           ...dispatchRes[DispatchNodeResponseKeyEnum.nodeResponse]
         };
       })();
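Taken together, the dispatchWorkFlow hunks drop the shared runningTime variable that was mutated after every node and instead capture a startTime right before each node runs, attaching the elapsed seconds to that node's own response. A minimal sketch of the same per-call timing pattern (runWithTiming, runNode and NodeResponse are hypothetical names, not FastGPT APIs):

  // A minimal sketch of the per-node timing pattern introduced above.
  type NodeResponse = { nodeId: string; runningTime: number; [key: string]: unknown };

  async function runWithTiming(
    nodeId: string,
    runNode: () => Promise<Record<string, unknown>>
  ): Promise<NodeResponse> {
    const startTime = Date.now(); // local to this call, no shared mutable state
    const result = await runNode();
    return {
      ...result,
      nodeId,
      // elapsed seconds with two decimals, same expression as in the diff
      runningTime: +((Date.now() - startTime) / 1000).toFixed(2)
    };
  }

  // usage: const res = await runWithTiming('node-1', async () => ({ ok: true }));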