feat: get tokens from api usage (#4671)

Archer
2025-04-27 01:13:38 +08:00
committed by GitHub
parent 0720bbe4da
commit 1465999c46
26 changed files with 270 additions and 223 deletions

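The theme of the commit: token counts are read from the usage object the model API already returns, rather than being recomputed locally, with a zeroed default as the fallback. A minimal TypeScript sketch of that idea (the model name and the estimateTokensLocally fallback are illustrative, not taken from this commit):

import OpenAI from 'openai';
import type { CompletionUsage } from 'openai/resources';

const openai = new OpenAI();

// Prefer the provider-reported usage; only estimate locally when it is absent.
export async function chatWithUsage(prompt: string): Promise<CompletionUsage> {
  const completion = await openai.chat.completions.create({
    model: 'gpt-4o-mini',
    messages: [{ role: 'user', content: prompt }]
  });

  if (completion.usage) return completion.usage;

  // Hypothetical local fallback when the provider omits usage.
  const prompt_tokens = estimateTokensLocally(prompt);
  const completion_tokens = estimateTokensLocally(completion.choices[0]?.message?.content ?? '');
  return { prompt_tokens, completion_tokens, total_tokens: prompt_tokens + completion_tokens };
}

// Illustrative stand-in for a tokenizer-based estimate.
function estimateTokensLocally(text: string): number {
  return Math.ceil(text.length / 4);
}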

@@ -1,4 +1,13 @@
import { i18nT } from '../../../web/i18n/utils';
import type { CompletionUsage } from './type';
export const getLLMDefaultUsage = (): CompletionUsage => {
  return {
    prompt_tokens: 0,
    completion_tokens: 0,
    total_tokens: 0
  };
};
export enum ChatCompletionRequestMessageRoleEnum {
  'System' = 'system',

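The hunk above adds getLLMDefaultUsage(), which gives every request a zeroed CompletionUsage to start from, so downstream code can add whatever the API reports without null checks. A hedged sketch of how it might be used (the import path and the addUsage helper are illustrative):

import { getLLMDefaultUsage } from './constants'; // import path is illustrative
import type { CompletionUsage } from './type';

// Fold provider-reported usage into a running total, tolerating missing fields.
export function addUsage(acc: CompletionUsage, reported?: Partial<CompletionUsage>): CompletionUsage {
  return {
    prompt_tokens: acc.prompt_tokens + (reported?.prompt_tokens ?? 0),
    completion_tokens: acc.completion_tokens + (reported?.completion_tokens ?? 0),
    total_tokens: acc.total_tokens + (reported?.total_tokens ?? 0)
  };
}

// Usage: start from the zeroed default, then fold in each response's usage.
let usage = getLLMDefaultUsage();
// usage = addUsage(usage, completion.usage);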

@@ -10,6 +10,7 @@ import type {
} from 'openai/resources';
import { ChatMessageTypeEnum } from './constants';
import { WorkflowInteractiveResponseType } from '../workflow/template/system/interactive/type';
import { Stream } from 'openai/streaming';
export * from 'openai/resources';
// Extension of ChatCompletionMessageParam, Add file url type
@@ -84,6 +85,7 @@ export type CompletionFinishReason =
export default openai;
export * from 'openai';
export type { Stream };
// Other
export type PromptTemplateItem = {

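The file above imports Stream from 'openai/streaming' and re-exports it as a type, so callers can type streamed responses without reaching into the SDK's internals. A sketch of pulling usage out of a stream, assuming current openai SDK types where ChatCompletionChunk carries an optional usage field (providers typically attach it only to the final chunk, and for OpenAI only when usage reporting is requested):

import type { Stream } from 'openai/streaming';
import type { ChatCompletionChunk, CompletionUsage } from 'openai/resources';

// Drain a chat-completion stream and return the usage reported on the last chunk, if any.
export async function collectStreamUsage(
  stream: Stream<ChatCompletionChunk>
): Promise<CompletionUsage | undefined> {
  let usage: CompletionUsage | undefined;
  for await (const chunk of stream) {
    // chunk.usage is empty on content chunks and populated on the final chunk
    // when usage reporting is enabled for the request.
    if (chunk.usage) usage = chunk.usage;
  }
  return usage;
}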

@@ -185,7 +185,6 @@ export const mergeChatResponseData = (
  runningTime: +((lastResponse.runningTime || 0) + (curr.runningTime || 0)).toFixed(2),
  totalPoints: (lastResponse.totalPoints || 0) + (curr.totalPoints || 0),
  childTotalPoints: (lastResponse.childTotalPoints || 0) + (curr.childTotalPoints || 0),
  toolCallTokens: (lastResponse.toolCallTokens || 0) + (curr.toolCallTokens || 0),
  toolDetail: [...(lastResponse.toolDetail || []), ...(curr.toolDetail || [])],
  loopDetail: [...(lastResponse.loopDetail || []), ...(curr.loopDetail || [])],
  pluginDetail: [...(lastResponse.pluginDetail || []), ...(curr.pluginDetail || [])]

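The hunk above drops the aggregate toolCallTokens sum from mergeChatResponseData. The added lines are not visible here, but presumably the merge now sums the split input/output counters carried by the response type in the next hunk; a hypothetical sketch of that replacement:

import type { DispatchNodeResponseType } from './type'; // import path is illustrative

// Hypothetical replacement for the removed line: sum the split counters
// instead of a single toolCallTokens total.
function mergeToolTokens(lastResponse: DispatchNodeResponseType, curr: DispatchNodeResponseType) {
  return {
    toolCallInputTokens: (lastResponse.toolCallInputTokens || 0) + (curr.toolCallInputTokens || 0),
    toolCallOutputTokens: (lastResponse.toolCallOutputTokens || 0) + (curr.toolCallOutputTokens || 0)
  };
}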

@@ -186,7 +186,6 @@ export type DispatchNodeResponseType = {
  ifElseResult?: string;
  // tool
  toolCallTokens?: number;
  toolCallInputTokens?: number;
  toolCallOutputTokens?: number;
  toolDetail?: ChatHistoryItemResType[];
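DispatchNodeResponseType now carries separate input/output token counts for tool calls, which maps directly onto the prompt_tokens / completion_tokens split in the API's usage object. A hedged sketch of filling the new fields (the helper name is illustrative):

import type { CompletionUsage } from 'openai/resources';

// Illustrative only: map provider-reported usage onto the split tool-call fields.
function toToolTokenFields(usage: CompletionUsage) {
  return {
    toolCallInputTokens: usage.prompt_tokens,
    toolCallOutputTokens: usage.completion_tokens
  };
}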