Mirror of https://github.com/labring/FastGPT.git, synced 2025-07-23 21:13:50 +00:00
feat: get tokens from api usage (#4671)
@@ -1,4 +1,13 @@
 import { i18nT } from '../../../web/i18n/utils';
+import type { CompletionUsage } from './type';
+
+export const getLLMDefaultUsage = (): CompletionUsage => {
+  return {
+    prompt_tokens: 0,
+    completion_tokens: 0,
+    total_tokens: 0
+  };
+};
 
 export enum ChatCompletionRequestMessageRoleEnum {
   'System' = 'system',
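The hunk above adds getLLMDefaultUsage, a zeroed CompletionUsage that callers can start from before folding in whatever the model API reports. A minimal sketch of that pattern follows; the import paths and the mergeUsage helper are assumptions for illustration, not part of this commit.

// Sketch only: getLLMDefaultUsage and CompletionUsage come from this commit;
// the import paths and the mergeUsage helper are assumed for illustration.
import { getLLMDefaultUsage } from './constants';
import type { CompletionUsage } from './type';

const mergeUsage = (base: CompletionUsage, apiUsage?: CompletionUsage): CompletionUsage => ({
  // Treat a missing usage block from the API as zero tokens.
  prompt_tokens: base.prompt_tokens + (apiUsage?.prompt_tokens ?? 0),
  completion_tokens: base.completion_tokens + (apiUsage?.completion_tokens ?? 0),
  total_tokens: base.total_tokens + (apiUsage?.total_tokens ?? 0)
});

// Start from the zeroed default, then add what the API actually returned.
let usage = getLLMDefaultUsage();
usage = mergeUsage(usage, { prompt_tokens: 12, completion_tokens: 34, total_tokens: 46 });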
packages/global/core/ai/type.d.ts (vendored), 2 lines changed
@@ -10,6 +10,7 @@ import type {
 } from 'openai/resources';
 import { ChatMessageTypeEnum } from './constants';
 import { WorkflowInteractiveResponseType } from '../workflow/template/system/interactive/type';
+import { Stream } from 'openai/streaming';
 export * from 'openai/resources';
 
 // Extension of ChatCompletionMessageParam, Add file url type
@@ -84,6 +85,7 @@ export type CompletionFinishReason =
 
 export default openai;
 export * from 'openai';
+export type { Stream };
 
 // Other
 export type PromptTemplateItem = {
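Together with the Stream re-export, the commit title ("get tokens from api usage") points at reading token counts straight from the usage block the API returns rather than estimating them locally. A hedged sketch of how that might look for a streamed call is below; it assumes an OpenAI-compatible endpoint that honors stream_options.include_usage, and the model name and import paths are illustrative, not taken from this commit.

// Sketch only: assumes an OpenAI-compatible endpoint that, when asked via
// stream_options.include_usage, emits a final chunk carrying `usage`.
import OpenAI from 'openai';
import { getLLMDefaultUsage } from './constants'; // assumed path
import type { CompletionUsage } from './type'; // assumed path

async function usageFromStream(client: OpenAI): Promise<CompletionUsage> {
  const stream = await client.chat.completions.create({
    model: 'gpt-4o-mini', // example model name
    messages: [{ role: 'user', content: 'hi' }],
    stream: true,
    stream_options: { include_usage: true }
  });

  let usage = getLLMDefaultUsage();
  for await (const chunk of stream) {
    // Most chunks carry no usage; the final one (when requested) does.
    if (chunk.usage) {
      usage = {
        prompt_tokens: chunk.usage.prompt_tokens,
        completion_tokens: chunk.usage.completion_tokens,
        total_tokens: chunk.usage.total_tokens
      };
    }
  }
  return usage;
}

Starting from getLLMDefaultUsage keeps the returned shape consistent even when a provider never reports a usage block.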