fix: price page init data;perf: usage code;fix: reasoning tokens;fix: workflow basic node cannot upgrade (#3816)

* fix: img read

* fix: price page init data

* perf: ai model avatar

* perf: refresh in change team

* perf: null checker

* perf: usage code

* fix: reasoning tokens

* fix: workflow basic node cannot upgrade

* perf: model refresh

* perf: icon refresh
This commit is contained in:
Archer
2025-02-18 20:50:25 +08:00
committed by GitHub
parent ccf28d83b8
commit 09205e4666
32 changed files with 373 additions and 248 deletions

View File

@@ -11,7 +11,13 @@ weight: 802
## 完整更新内容
1. 新增 - AI 对话节点解析 <think></think> 标签内容,便于各类模型进行思考链输出。
2. 修复 - 思考链流输出时,有时与正文顺序偏差
3. 修复 - API 调用工作流,如果传递的图片不支持 Head 检测时,图片会被过滤。已增加该类错误检测,避免被错误过滤
4. 修复 - 模板市场部分模板错误。
5. 修复 - 免登录窗口无法正常判断语言识别是否开启
2. 优化 - 模型未配置时提示,减少冲突提示
3. 优化 - 使用记录代码
4. 修复 - 思考内容未进入到输出 Tokens。
5. 修复 - 思考链流输出时,有时与正文顺序偏差
6. 修复 - API 调用工作流,如果传递的图片不支持 Head 检测时,图片会被过滤。已增加该类错误检测,避免被错误过滤。
7. 修复 - 模板市场部分模板错误。
8. 修复 - 免登录窗口无法正常判断语言识别是否开启。
9. 修复 - 对话日志导出,未兼容 sub path。
10. 修复 - list 接口在联查 member 时,存在空指针可能性。
11. 修复 - 工作流基础节点无法升级。

View File

@@ -20,4 +20,4 @@ export const ReadFileBaseUrl = `${process.env.FILE_DOMAIN || process.env.FE_DOMA
export const documentFileType = '.txt, .docx, .csv, .xlsx, .pdf, .md, .html, .pptx';
export const imageFileType =
'.jpg, .jpeg, .png, .gif, .bmp, .webp, .svg, .tiff, .tif, .ico, .heic, .heif, .avif';
'.jpg, .jpeg, .png, .gif, .bmp, .webp, .svg, .tiff, .tif, .ico, .heic, .heif, .avif, .raw, .cr2, .nef, .arw, .dng, .psd, .ai, .eps, .emf, .wmf, .jfif, .exif, .pgm, .ppm, .pbm, .jp2, .j2k, .jpf, .jpx, .jpm, .mj2, .xbm, .pcx';

View File

@@ -46,6 +46,7 @@ export type ChatCompletionMessageParam = (
| CustomChatCompletionToolMessageParam
| CustomChatCompletionAssistantMessageParam
) & {
reasoning_text?: string;
dataId?: string;
hideInUI?: boolean;
};

View File

@@ -46,7 +46,16 @@ export const chats2GPTMessages = ({
messages.forEach((item) => {
const dataId = reserveId ? item.dataId : undefined;
if (item.obj === ChatRoleEnum.Human) {
if (item.obj === ChatRoleEnum.System) {
const content = item.value?.[0]?.text?.content;
if (content) {
results.push({
dataId,
role: ChatCompletionRequestMessageRoleEnum.System,
content
});
}
} else if (item.obj === ChatRoleEnum.Human) {
const value = item.value
.map((item) => {
if (item.type === ChatItemValueTypeEnum.text) {
@@ -80,15 +89,6 @@ export const chats2GPTMessages = ({
role: ChatCompletionRequestMessageRoleEnum.User,
content: simpleUserContentPart(value)
});
} else if (item.obj === ChatRoleEnum.System) {
const content = item.value?.[0]?.text?.content;
if (content) {
results.push({
dataId,
role: ChatCompletionRequestMessageRoleEnum.System,
content
});
}
} else {
const aiResults: ChatCompletionMessageParam[] = [];

View File

@@ -26,15 +26,18 @@ export async function uploadMongoImg({
const [base64Mime, base64Data] = base64Img.split(',');
// Check if mime type is valid
if (!base64MimeRegex.test(base64Mime)) {
return Promise.reject('Invalid image mime type');
return Promise.reject('Invalid image base64');
}
const mime = `image/${base64Mime.match(base64MimeRegex)?.[1] ?? 'image/jpeg'}`;
const binary = Buffer.from(base64Data, 'base64');
const extension = mime.split('/')[1];
let extension = mime.split('/')[1];
if (extension.startsWith('x-')) {
extension = extension.substring(2); // Remove 'x-' prefix
}
if (!imageFileType.includes(`.${extension}`)) {
return Promise.reject('Invalid image file type');
if (!extension || !imageFileType.includes(`.${extension}`)) {
return Promise.reject(`Invalid image file type: ${mime}`);
}
const { _id } = await MongoImage.create({

View File

@@ -25,7 +25,7 @@ export const countGptMessagesTokens = async (
number
>({
name: WorkerNameEnum.countGptMessagesTokens,
maxReservedThreads: global.systemEnv?.tokenWorkers || 50
maxReservedThreads: global.systemEnv?.tokenWorkers || 30
});
const total = await workerController.run({ messages, tools, functionCall });

View File

@@ -270,7 +270,8 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
const AIMessages: ChatCompletionMessageParam[] = [
{
role: ChatCompletionRequestMessageRoleEnum.Assistant,
content: answerText
content: answerText,
reasoning_text: reasoningText // reasoning_text is only recorded for response, but not for request
}
];

View File

@@ -232,9 +232,14 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
chatNodeUsages = chatNodeUsages.concat(nodeDispatchUsages);
}
if (toolResponses !== undefined) {
if (toolResponses !== undefined && toolResponses !== null) {
if (Array.isArray(toolResponses) && toolResponses.length === 0) return;
if (typeof toolResponses === 'object' && Object.keys(toolResponses).length === 0) return;
if (
!Array.isArray(toolResponses) &&
typeof toolResponses === 'object' &&
Object.keys(toolResponses).length === 0
)
return;
toolRunResponse = toolResponses;
}

View File

@@ -1,6 +1,114 @@
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
import { MongoUsage } from './schema';
import { ClientSession } from '../../../common/mongo';
import { ClientSession, Types } from '../../../common/mongo';
import { addLog } from '../../../common/system/log';
import { ChatNodeUsageType } from '@fastgpt/global/support/wallet/bill/type';
import { ConcatUsageProps, CreateUsageProps } from '@fastgpt/global/support/wallet/usage/api';
import { i18nT } from '../../../../web/i18n/utils';
import { pushConcatBillTask, pushReduceTeamAiPointsTask } from './utils';
import { POST } from '../../../common/api/plusRequest';
import { FastGPTProUrl } from '../../../common/system/constants';
export async function createUsage(data: CreateUsageProps) {
try {
// In FastGPT server
if (FastGPTProUrl) {
await POST('/support/wallet/usage/createUsage', data);
} else if (global.reduceAiPointsQueue) {
// In FastGPT pro server
await MongoUsage.create(data);
pushReduceTeamAiPointsTask({ teamId: data.teamId, totalPoints: data.totalPoints });
if (data.totalPoints === 0) {
addLog.info('0 totalPoints', data);
}
}
} catch (error) {
addLog.error('createUsage error', error);
}
}
export async function concatUsage(data: ConcatUsageProps) {
try {
// In FastGPT server
if (FastGPTProUrl) {
await POST('/support/wallet/usage/concatUsage', data);
} else if (global.reduceAiPointsQueue) {
const {
teamId,
billId,
totalPoints = 0,
listIndex,
inputTokens = 0,
outputTokens = 0
} = data;
// billId is required and valid
if (!billId || !Types.ObjectId.isValid(billId)) return;
// In FastGPT pro server
pushConcatBillTask([
{
billId,
listIndex,
inputTokens,
outputTokens,
totalPoints
}
]);
pushReduceTeamAiPointsTask({ teamId, totalPoints });
if (data.totalPoints === 0) {
addLog.info('0 totalPoints', data);
}
}
} catch (error) {
addLog.error('concatUsage error', error);
}
}
export const createChatUsage = ({
appName,
appId,
pluginId,
teamId,
tmbId,
source,
flowUsages
}: {
appName: string;
appId?: string;
pluginId?: string;
teamId: string;
tmbId: string;
source: UsageSourceEnum;
flowUsages: ChatNodeUsageType[];
}) => {
const totalPoints = flowUsages.reduce((sum, item) => sum + (item.totalPoints || 0), 0);
createUsage({
teamId,
tmbId,
appName,
appId,
pluginId,
totalPoints,
source,
list: flowUsages.map((item) => ({
moduleName: item.moduleName,
amount: item.totalPoints || 0,
model: item.model,
inputTokens: item.inputTokens,
outputTokens: item.outputTokens
}))
});
addLog.debug(`Create chat usage`, {
source,
teamId,
totalPoints
});
return { totalPoints };
};
export const createTrainingUsage = async ({
teamId,
@@ -29,21 +137,21 @@ export const createTrainingUsage = async ({
totalPoints: 0,
list: [
{
moduleName: 'support.wallet.moduleName.index',
moduleName: i18nT('common:support.wallet.moduleName.index'),
model: vectorModel,
amount: 0,
inputTokens: 0,
outputTokens: 0
},
{
moduleName: 'support.wallet.moduleName.qa',
moduleName: i18nT('common:support.wallet.moduleName.qa'),
model: agentModel,
amount: 0,
inputTokens: 0,
outputTokens: 0
},
{
moduleName: 'core.dataset.training.Auto mode',
moduleName: i18nT('common:core.dataset.training.Auto mode'),
model: agentModel,
amount: 0,
inputTokens: 0,

View File

@@ -0,0 +1,12 @@
export type ConcatBillQueueItemType = {
billId: string;
listIndex?: number;
totalPoints: number;
inputTokens: number;
outputTokens: number;
};
declare global {
var reduceAiPointsQueue: { teamId: string; totalPoints: number }[];
var concatBillQueue: ConcatBillQueueItemType[];
}

View File

@@ -1,5 +1,6 @@
import { findAIModel } from '../../../core/ai/model';
import { ModelTypeEnum } from '@fastgpt/global/core/ai/model';
import { ConcatBillQueueItemType } from './type';
export const formatModelChars2Points = ({
model,
@@ -34,3 +35,20 @@ export const formatModelChars2Points = ({
totalPoints
};
};
export const pushReduceTeamAiPointsTask = ({
teamId,
totalPoints
}: {
teamId: string;
totalPoints: number;
}) => {
global.reduceAiPointsQueue.push({
teamId: String(teamId),
totalPoints
});
};
export const pushConcatBillTask = (data: ConcatBillQueueItemType[]) => {
global.concatBillQueue.push(...data);
};

View File

@@ -72,7 +72,7 @@ parentPort?.on(
};
const total =
messages.reduce((sum, item) => {
messages.reduce((sum, item, index) => {
// Evaluates the text of toolcall and functioncall
const functionCallPrompt = (() => {
let prompt = '';
@@ -100,7 +100,13 @@ parentPort?.on(
.join('');
})();
return sum + countPromptTokens(`${contentPrompt}${functionCallPrompt}`, item.role);
// Only the last message computed reasoning_text
const reasoningText = index === messages.length - 1 ? item.reasoning_text || '' : '';
return (
sum +
countPromptTokens(`${reasoningText}${contentPrompt}${functionCallPrompt}`, item.role)
);
}, 0) +
countToolsTokens(tools) +
countToolsTokens(functionCall);

View File

@@ -1,17 +1,17 @@
import React, { useEffect, useState } from 'react';
import React, { useEffect } from 'react';
import type { IconProps } from '@chakra-ui/react';
import { Box, Icon } from '@chakra-ui/react';
import { iconPaths } from './constants';
import type { IconNameType } from './type.d';
import { useRefresh } from '../../../hooks/useRefresh';
const iconCache: Record<string, any> = {};
const MyIcon = ({ name, w = 'auto', h = 'auto', ...props }: { name: IconNameType } & IconProps) => {
const [IconComponent, setIconComponent] = useState<any>(null);
const { refresh } = useRefresh();
useEffect(() => {
if (iconCache[name]) {
setIconComponent(iconCache[name]);
return;
}
@@ -20,11 +20,13 @@ const MyIcon = ({ name, w = 'auto', h = 'auto', ...props }: { name: IconNameType
const component = { as: icon.default };
// Store in cache
iconCache[name] = component;
setIconComponent(component);
refresh();
})
.catch((error) => console.log(error));
}, [name]);
const IconComponent = iconCache[name];
return !!IconComponent ? (
<Icon
{...IconComponent}
@@ -40,4 +42,4 @@ const MyIcon = ({ name, w = 'auto', h = 'auto', ...props }: { name: IconNameType
);
};
export default MyIcon;
export default React.memo(MyIcon);

View File

@@ -6,7 +6,7 @@
"systemEnv": {
"vectorMaxProcess": 15, // 向量处理线程数量
"qaMaxProcess": 15, // 问答拆分线程数量
"tokenWorkers": 50, // Token 计算线程保持数,会持续占用内存,不能设置太大。
"tokenWorkers": 30, // Token 计算线程保持数,会持续占用内存,不能设置太大。
"pgHNSWEfSearch": 100 // 向量搜索参数。越大搜索越精确但是速度越慢。设置为100有99%+精度。
}
}

View File

@@ -6,7 +6,7 @@
"systemEnv": {
"vectorMaxProcess": 15, // 向量处理线程数量
"qaMaxProcess": 15, // 问答拆分线程数量
"tokenWorkers": 50, // Token 计算线程保持数,会持续占用内存,不能设置太大。
"tokenWorkers": 30, // Token 计算线程保持数,会持续占用内存,不能设置太大。
"pgHNSWEfSearch": 100 // 向量搜索参数。越大搜索越精确但是速度越慢。设置为100有99%+精度。
},
"llmModels": [

View File

@@ -9,7 +9,7 @@ module.exports = {
locales: ['en', 'zh-CN', 'zh-Hant'],
localeDetection: false
},
localePath:
typeof window === 'undefined' ? require('path').resolve('../../packages/web/i18n') : '/i18n',
defaultNS: 'common',
localePath: require('path').resolve('../../packages/web/i18n'),
reloadOnPrerender: process.env.NODE_ENV === 'development'
};

View File

@@ -1,4 +1,4 @@
import React, { useEffect, useMemo } from 'react';
import React, { useMemo } from 'react';
import { Box, Flex } from '@chakra-ui/react';
import { useRouter } from 'next/router';
import { useLoading } from '@fastgpt/web/hooks/useLoading';
@@ -11,7 +11,7 @@ import { useI18nLng } from '@fastgpt/web/hooks/useI18n';
import Auth from './auth';
import { useSystem } from '@fastgpt/web/hooks/useSystem';
import { useMount } from 'ahooks';
import { useDebounceEffect, useMount } from 'ahooks';
import { useTranslation } from 'next-i18next';
import { useToast } from '@fastgpt/web/hooks/useToast';
@@ -88,7 +88,8 @@ const Layout = ({ children }: { children: JSX.Element }) => {
});
// Check model invalid
useEffect(() => {
useDebounceEffect(
() => {
if (userInfo?.username === 'root') {
if (llmModelList.length === 0) {
toast({
@@ -104,7 +105,12 @@ const Layout = ({ children }: { children: JSX.Element }) => {
router.push('/account/model');
}
}
}, [embeddingModelList.length, llmModelList.length, userInfo?.username]);
},
[embeddingModelList.length, llmModelList.length, userInfo?.username],
{
wait: 2000
}
);
return (
<>

View File

@@ -35,7 +35,9 @@ const OneRowSelector = ({ list, onchange, disableTip, ...props }: Props) => {
return props.size ? size[props.size] : size['md'];
}, [props.size]);
const avatarList = list.map((item) => {
const avatarList = useMemo(
() =>
list.map((item) => {
const modelData = getModelFromList(
[
...llmModelList,
@@ -62,7 +64,17 @@ const OneRowSelector = ({ list, onchange, disableTip, ...props }: Props) => {
</Flex>
)
};
});
}),
[
list,
llmModelList,
embeddingModelList,
ttsModelList,
sttModelList,
reRankModelList,
avatarSize
]
);
return (
<Box
@@ -99,6 +111,16 @@ const MultipleRowSelector = ({ list, onchange, disableTip, ...props }: Props) =>
const { t } = useTranslation();
const { llmModelList, embeddingModelList, ttsModelList, sttModelList, reRankModelList } =
useSystemStore();
const modelList = useMemo(() => {
return [
...llmModelList,
...embeddingModelList,
...ttsModelList,
...sttModelList,
...reRankModelList
];
}, [llmModelList, embeddingModelList, ttsModelList, sttModelList, reRankModelList]);
const [value, setValue] = useState<string[]>([]);
const avatarSize = useMemo(() => {
@@ -134,7 +156,7 @@ const MultipleRowSelector = ({ list, onchange, disableTip, ...props }: Props) =>
}));
for (const item of list) {
const modelData = getModelFromList([...llmModelList, ...embeddingModelList], item.value);
const modelData = getModelFromList(modelList, item.value);
const provider =
renderList.find((item) => item.value === (modelData?.provider || 'Other')) ??
renderList[renderList.length - 1];
@@ -146,7 +168,7 @@ const MultipleRowSelector = ({ list, onchange, disableTip, ...props }: Props) =>
}
return renderList.filter((item) => item.children.length > 0);
}, [avatarSize, list, llmModelList, t, embeddingModelList]);
}, [avatarSize, list, modelList]);
const onSelect = useCallback(
(e: string[]) => {
@@ -156,16 +178,7 @@ const MultipleRowSelector = ({ list, onchange, disableTip, ...props }: Props) =>
);
const SelectedModel = useMemo(() => {
const modelData = getModelFromList(
[
...llmModelList,
...embeddingModelList,
...ttsModelList,
...sttModelList,
...reRankModelList
],
props.value
);
const modelData = getModelFromList(modelList, props.value);
setValue([modelData.provider, props.value]);
@@ -181,15 +194,7 @@ const MultipleRowSelector = ({ list, onchange, disableTip, ...props }: Props) =>
<Box>{modelData?.name}</Box>
</HStack>
);
}, [
llmModelList,
embeddingModelList,
ttsModelList,
sttModelList,
reRankModelList,
props.value,
avatarSize
]);
}, [modelList, props.value, avatarSize]);
return (
<Box

View File

@@ -13,11 +13,11 @@ import { useRouter } from 'next/router';
const TeamSelector = ({
showManage,
afterSwitchTeam,
onChange,
...props
}: ButtonProps & {
}: Omit<ButtonProps, 'onChange'> & {
showManage?: boolean;
afterSwitchTeam?: () => void;
onChange?: () => void;
}) => {
const { t } = useTranslation();
const router = useRouter();
@@ -38,7 +38,7 @@ const TeamSelector = ({
{
onFinally: () => {
setLoading(false);
afterSwitchTeam?.();
onChange?.();
},
errorToast: t('common:user.team.Switch Team Failed')
}

View File

@@ -142,7 +142,9 @@ const NodeCard = (props: Props) => {
const { runAsync: onClickSyncVersion } = useRequest2(
async () => {
if (!node?.pluginId) return;
if (!node) return;
if (node.pluginId) {
const template = await getPreviewPluginNode({ appId: node.pluginId });
if (!!template) {
@@ -151,10 +153,25 @@ const NodeCard = (props: Props) => {
node: template
});
}
onCloseConfirmSync();
} else {
const template = moduleTemplatesFlat.find(
(item) => item.flowNodeType === node.flowNodeType
);
if (!template) {
return toast({
title: t('app:app.modules.not_found_tips'),
status: 'warning'
});
}
onResetNode({
id: nodeId,
node: template
});
}
},
{
refreshDeps: [node, nodeId, onResetNode]
refreshDeps: [node, nodeId, onResetNode],
onFinally() {}
}
);
@@ -311,7 +328,6 @@ const NodeCard = (props: Props) => {
</Box>
)}
<MenuRender nodeId={nodeId} menuForbid={menuForbid} nodeList={nodeList} />
<ConfirmSyncModal />
</Box>
);
}, [
@@ -335,7 +351,6 @@ const NodeCard = (props: Props) => {
intro,
menuForbid,
nodeList,
ConfirmSyncModal,
onChangeNode,
onOpenCustomTitleModal,
toast

View File

@@ -7,7 +7,7 @@ import MyIcon from '@fastgpt/web/components/common/Icon';
import { useTranslation } from 'next-i18next';
import React, { DragEvent, useCallback, useMemo, useState } from 'react';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { useRequest } from '@fastgpt/web/hooks/useRequest';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import { getFileIcon } from '@fastgpt/global/common/file/icon';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { uploadFile2DB } from '@/web/common/file/controller';
@@ -66,30 +66,9 @@ const FileSelector = ({
'i'
);
const { mutate: onSelectFile, isLoading } = useRequest({
mutationFn: async (files: SelectFileItemType[]) => {
const { runAsync: onSelectFile, loading: isLoading } = useRequest2(
async (files: SelectFileItemType[]) => {
{
onStartSelect();
setSelectFiles((state) => {
const formatFiles = files.map<ImportSourceItemType>((selectFile) => {
const { fileId, file } = selectFile;
return {
id: fileId,
createStatus: 'waiting',
file,
sourceName: file.name,
sourceSize: formatFileSize(file.size),
icon: getFileIcon(file.name),
isUploading: true,
uploadedFileRate: 0
};
});
const results = formatFiles.concat(state).slice(0, maxCount);
return results;
});
try {
// upload file
await Promise.all(
files.map(async ({ fileId, file }) => {
const { fileId: uploadFileId } = await uploadFile2DB({
@@ -127,13 +106,35 @@ const FileSelector = ({
);
})
);
} catch (error) {
console.log(error);
}
},
{
onBefore([files]) {
onStartSelect();
setSelectFiles((state) => {
const formatFiles = files.map<ImportSourceItemType>((selectFile) => {
const { fileId, file } = selectFile;
return {
id: fileId,
createStatus: 'waiting',
file,
sourceName: file.name,
sourceSize: formatFileSize(file.size),
icon: getFileIcon(file.name),
isUploading: true,
uploadedFileRate: 0
};
});
const results = formatFiles.concat(state).slice(0, maxCount);
return results;
});
},
onFinally() {
onFinishSelect();
}
}
});
);
const selectFileCallback = useCallback(
(files: SelectFileItemType[]) => {

View File

@@ -284,7 +284,7 @@ const MyInfo = ({ onOpenContact }: { onOpenContact: () => void }) => {
<Flex mt={6} alignItems={'center'}>
<Box {...labelStyles}>{t('account_info:user_team_team_name')}:&nbsp;</Box>
<Flex flex={'1 0 0'} w={0} align={'center'}>
<TeamSelector height={'28px'} w={'100%'} showManage afterSwitchTeam={initUserInfo} />
<TeamSelector height={'28px'} w={'100%'} showManage onChange={initUserInfo} />
</Flex>
</Flex>
)}

View File

@@ -88,7 +88,7 @@ const Team = () => {
</Box>
</Flex>
<Flex align={'center'} ml={6}>
<TeamSelector height={'28px'} afterSwitchTeam={refetchMembers} />
<TeamSelector height={'28px'} onChange={refetchMembers} />
</Flex>
{userInfo?.team?.role === TeamMemberRoleEnum.owner && (
<Flex align={'center'} justify={'center'} ml={2} p={'0.44rem'}>

View File

@@ -44,12 +44,22 @@ async function handler(
defaultModels: global.systemDefaultModel
};
} catch (error) {
const referer = req.headers.referer;
if (referer?.includes('/price')) {
return {
feConfigs: global.feConfigs,
subPlans: global.subPlans,
activeModelList
};
}
const unAuthBufferId = global.systemInitBufferId ? `unAuth_${global.systemInitBufferId}` : '';
if (bufferId && unAuthBufferId === bufferId) {
return {
bufferId: unAuthBufferId
};
}
return {
bufferId: unAuthBufferId,
feConfigs: global.feConfigs

View File

@@ -5,7 +5,7 @@ import {
SseResponseEventEnum
} from '@fastgpt/global/core/workflow/runtime/constants';
import { responseWrite } from '@fastgpt/service/common/response';
import { pushChatUsage } from '@/service/support/wallet/usage/push';
import { createChatUsage } from '@fastgpt/service/support/wallet/usage/controller';
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
import type { AIChatItemType, UserChatItemType } from '@fastgpt/global/core/chat/type';
import { authApp } from '@fastgpt/service/support/permission/app/auth';
@@ -244,7 +244,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
}
}
pushChatUsage({
createChatUsage({
appName,
appId,
teamId,

View File

@@ -1,5 +1,5 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { pushChatUsage } from '@/service/support/wallet/usage/push';
import { createChatUsage } from '@fastgpt/service/support/wallet/usage/controller';
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
import { authApp } from '@fastgpt/service/support/permission/app/auth';
import { dispatchWorkFlow } from '@fastgpt/service/core/workflow/dispatch';
@@ -66,8 +66,8 @@ async function handler(
maxRunTimes: WORKFLOW_MAX_RUN_TIMES
});
pushChatUsage({
appName: '工作流Debug',
createChatUsage({
appName: `${app.name}-Debug`,
appId,
teamId,
tmbId,

View File

@@ -20,7 +20,7 @@ import { GPTMessages2Chats, chatValue2RuntimePrompt } from '@fastgpt/global/core
import { getChatItems } from '@fastgpt/service/core/chat/controller';
import { saveChat, updateInteractiveChat } from '@fastgpt/service/core/chat/saveChat';
import { responseWrite } from '@fastgpt/service/common/response';
import { pushChatUsage } from '@/service/support/wallet/usage/push';
import { createChatUsage } from '@fastgpt/service/support/wallet/usage/controller';
import { authOutLinkChatStart } from '@/service/support/permission/auth/outLink';
import { pushResult2Remote, addOutLinkUsage } from '@fastgpt/service/support/outLink/tools';
import requestIp from 'request-ip';
@@ -423,7 +423,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
}
// add record
const { totalPoints } = pushChatUsage({
const { totalPoints } = createChatUsage({
appName: app.name,
appId: app._id,
teamId,

View File

@@ -67,9 +67,7 @@ const Login = ({ ChineseRedirectUrl }: { ChineseRedirectUrl: string }) => {
// 检查是否是当前的 route
const navigateTo =
decodeLastRoute && !decodeLastRoute.includes('/login') ? decodeLastRoute : '/app/list';
setTimeout(() => {
router.push(navigateTo);
}, 300);
},
[setUserInfo, lastRoute, router]
);

View File

@@ -1,5 +1,5 @@
import { getUserChatInfoAndAuthTeamPoints } from '@fastgpt/service/support/permission/auth/team';
import { pushChatUsage } from '@/service/support/wallet/usage/push';
import { createChatUsage } from '@fastgpt/service/support/wallet/usage/controller';
import { getNextTimeByCronStringAndTimezone } from '@fastgpt/global/common/string/time';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { delay, retryFn } from '@fastgpt/global/common/system/utils';
@@ -113,7 +113,7 @@ export const getScheduleTriggerApp = async () => {
}
]
});
pushChatUsage({
createChatUsage({
appName: app.name,
appId: app._id,
teamId: String(app.teamId),

View File

@@ -1,27 +0,0 @@
import { ConcatUsageProps, CreateUsageProps } from '@fastgpt/global/support/wallet/usage/api';
import { addLog } from '@fastgpt/service/common/system/log';
import { POST } from '@fastgpt/service/common/api/plusRequest';
import { FastGPTProUrl } from '@fastgpt/service/common/system/constants';
export async function createUsage(data: CreateUsageProps) {
if (!FastGPTProUrl) return;
if (data.totalPoints === 0) {
addLog.info('0 totalPoints', data);
}
try {
await POST('/support/wallet/usage/createUsage', data);
} catch (error) {
addLog.error('createUsage error', error);
}
}
export async function concatUsage(data: ConcatUsageProps) {
if (!FastGPTProUrl) return;
if (data.totalPoints === 0) {
addLog.info('0 totalPoints', data);
}
try {
await POST('/support/wallet/usage/concatUsage', data);
} catch (error) {
addLog.error('concatUsage error', error);
}
}

View File

@@ -1,55 +1,10 @@
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
import { addLog } from '@fastgpt/service/common/system/log';
import { createUsage, concatUsage } from './controller';
import { createUsage, concatUsage } from '@fastgpt/service/support/wallet/usage/controller';
import { formatModelChars2Points } from '@fastgpt/service/support/wallet/usage/utils';
import { ChatNodeUsageType } from '@fastgpt/global/support/wallet/bill/type';
import { i18nT } from '@fastgpt/web/i18n/utils';
import { ModelTypeEnum } from '@fastgpt/global/core/ai/model';
import { getDefaultTTSModel } from '@fastgpt/service/core/ai/model';
export const pushChatUsage = ({
appName,
appId,
pluginId,
teamId,
tmbId,
source,
flowUsages
}: {
appName: string;
appId?: string;
pluginId?: string;
teamId: string;
tmbId: string;
source: UsageSourceEnum;
flowUsages: ChatNodeUsageType[];
}) => {
const totalPoints = flowUsages.reduce((sum, item) => sum + (item.totalPoints || 0), 0);
createUsage({
teamId,
tmbId,
appName,
appId,
pluginId,
totalPoints,
source,
list: flowUsages.map((item) => ({
moduleName: item.moduleName,
amount: item.totalPoints || 0,
model: item.model,
inputTokens: item.inputTokens,
outputTokens: item.outputTokens
}))
});
addLog.info(`finish completions`, {
source,
teamId,
totalPoints
});
return { totalPoints };
};
export const pushQAUsage = async ({
teamId,
tmbId,

View File

@@ -31,7 +31,7 @@ export const uploadFile2DB = ({
if (!e.total) return;
const percent = Math.round((e.loaded / e.total) * 100);
percentListen && percentListen(percent);
percentListen?.(percent);
});
};