V4.8.17 feature (#3493)

* split tokens into input and output (#3477)

* split tokens into input and output

* query extension & tool call & question guide

* fix

* perf: input and output tokens

* perf: tool call if else

* perf: remove code

* fix: extract usage count

* fix: qa usage count

---------

Co-authored-by: heheer <heheer@sealos.io>
This commit is contained in:
Archer
2024-12-30 10:13:25 +08:00
committed by GitHub
parent da2831b948
commit 50bf7f9a3b
46 changed files with 467 additions and 230 deletions

View File

@@ -1,17 +1,20 @@
import { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
import { ModelTypeEnum, getModelMap } from '../../../core/ai/model';
export const formatModelChars2Points = ({
model,
-tokens = 0,
+inputTokens = 0,
+outputTokens = 0,
modelType,
multiple = 1000
}: {
model: string;
-tokens: number;
+inputTokens?: number;
+outputTokens?: number;
modelType: `${ModelTypeEnum}`;
multiple?: number;
}) => {
-const modelData = getModelMap?.[modelType]?.(model);
+const modelData = getModelMap?.[modelType]?.(model) as LLMModelItemType;
if (!modelData) {
return {
totalPoints: 0,
@@ -19,7 +22,12 @@ export const formatModelChars2Points = ({
};
}
-const totalPoints = (modelData.charsPointsPrice || 0) * (tokens / multiple);
+const isIOPriceType = typeof modelData.inputPrice === 'number';
+const totalPoints = isIOPriceType
+  ? (modelData.inputPrice || 0) * (inputTokens / multiple) +
+    (modelData.outputPrice || 0) * (outputTokens / multiple)
+  : (modelData.charsPointsPrice || 0) * ((inputTokens + outputTokens) / multiple);
return {
modelName: modelData.name,