fix: price page init data; perf: usage code; fix: reasoning tokens; fix: workflow basic node cannot upgrade (#3816)

* fix: img read

* fix: price page init data

* perf: ai model avatar

* perf: refresh in change team

* perf: null checker

* perf: usage code

* fix: reasoning tokens

* fix: workflow basic node cannot upgrade

* perf: model refresh

* perf: icon refresh

Author: Archer
Date: 2025-02-18 20:50:25 +08:00
Committed by: GitHub
Parent: ccf28d83b8
Commit: 09205e4666
32 changed files with 373 additions and 248 deletions

View File

@@ -6,7 +6,7 @@
"systemEnv": {
"vectorMaxProcess": 15, // 向量处理线程数量
"qaMaxProcess": 15, // 问答拆分线程数量
"tokenWorkers": 50, // Token 计算线程保持数,会持续占用内存,不能设置太大。
"tokenWorkers": 30, // Token 计算线程保持数,会持续占用内存,不能设置太大。
"pgHNSWEfSearch": 100 // 向量搜索参数。越大搜索越精确但是速度越慢。设置为100有99%+精度。
}
}

View File

@@ -6,7 +6,7 @@
"systemEnv": {
"vectorMaxProcess": 15, // 向量处理线程数量
"qaMaxProcess": 15, // 问答拆分线程数量
"tokenWorkers": 50, // Token 计算线程保持数,会持续占用内存,不能设置太大。
"tokenWorkers": 30, // Token 计算线程保持数,会持续占用内存,不能设置太大。
"pgHNSWEfSearch": 100 // 向量搜索参数。越大搜索越精确但是速度越慢。设置为100有99%+精度。
},
"llmModels": [

View File

@@ -9,7 +9,7 @@ module.exports = {
    locales: ['en', 'zh-CN', 'zh-Hant'],
    localeDetection: false
  },
- localePath:
-   typeof window === 'undefined' ? require('path').resolve('../../packages/web/i18n') : '/i18n',
  defaultNS: 'common',
+ localePath: require('path').resolve('../../packages/web/i18n'),
  reloadOnPrerender: process.env.NODE_ENV === 'development'
};

View File

@@ -1,4 +1,4 @@
- import React, { useEffect, useMemo } from 'react';
+ import React, { useMemo } from 'react';
import { Box, Flex } from '@chakra-ui/react';
import { useRouter } from 'next/router';
import { useLoading } from '@fastgpt/web/hooks/useLoading';
@@ -11,7 +11,7 @@ import { useI18nLng } from '@fastgpt/web/hooks/useI18n';
import Auth from './auth';
import { useSystem } from '@fastgpt/web/hooks/useSystem';
- import { useMount } from 'ahooks';
+ import { useDebounceEffect, useMount } from 'ahooks';
import { useTranslation } from 'next-i18next';
import { useToast } from '@fastgpt/web/hooks/useToast';
@@ -88,23 +88,29 @@ const Layout = ({ children }: { children: JSX.Element }) => {
});
// Check model invalid
-   useEffect(() => {
-     if (userInfo?.username === 'root') {
-       if (llmModelList.length === 0) {
-         toast({
-           status: 'warning',
-           title: t('common:llm_model_not_config')
-         });
-         router.push('/account/model');
-       } else if (embeddingModelList.length === 0) {
-         toast({
-           status: 'warning',
-           title: t('common:embedding_model_not_config')
-         });
-         router.push('/account/model');
+   useDebounceEffect(
+     () => {
+       if (userInfo?.username === 'root') {
+         if (llmModelList.length === 0) {
+           toast({
+             status: 'warning',
+             title: t('common:llm_model_not_config')
+           });
+           router.push('/account/model');
+         } else if (embeddingModelList.length === 0) {
+           toast({
+             status: 'warning',
+             title: t('common:embedding_model_not_config')
+           });
+           router.push('/account/model');
        }
      }
+     },
+     [embeddingModelList.length, llmModelList.length, userInfo?.username],
+     {
+       wait: 2000
+     }
-   }, [embeddingModelList.length, llmModelList.length, userInfo?.username]);
+   );
return (
<>

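Note: the Layout change above swaps useEffect for ahooks' useDebounceEffect so the "model not configured" warning only fires after the model lists have been stable for two seconds, rather than flashing while models are being refreshed (e.g. after a team switch). A minimal sketch of the pattern, assuming the standard ahooks signature; the component and props below are illustrative, not from the codebase:

import React, { useState } from 'react';
import { useDebounceEffect } from 'ahooks';

// Illustrative component: `modelCount` may change several times in quick succession.
export function ModelConfigWarning({ modelCount }: { modelCount: number }) {
  const [warning, setWarning] = useState('');

  // Unlike useEffect, the callback runs only after the deps have been
  // unchanged for `wait` ms, so transient empty lists do not trigger it.
  useDebounceEffect(
    () => {
      setWarning(modelCount === 0 ? 'No LLM model configured' : '');
    },
    [modelCount],
    { wait: 2000 }
  );

  return warning ? <p>{warning}</p> : null;
}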
View File

@@ -35,34 +35,46 @@ const OneRowSelector = ({ list, onchange, disableTip, ...props }: Props) => {
return props.size ? size[props.size] : size['md'];
}, [props.size]);
- const avatarList = list.map((item) => {
-   const modelData = getModelFromList(
-     [
-       ...llmModelList,
-       ...embeddingModelList,
-       ...ttsModelList,
-       ...sttModelList,
-       ...reRankModelList
-     ],
-     item.value
-   );
+ const avatarList = useMemo(
+   () =>
+     list.map((item) => {
+       const modelData = getModelFromList(
+         [
+           ...llmModelList,
+           ...embeddingModelList,
+           ...ttsModelList,
+           ...sttModelList,
+           ...reRankModelList
+         ],
+         item.value
+       );
-   return {
-     value: item.value,
-     label: (
-       <Flex alignItems={'center'} py={1}>
-         <Avatar
-           borderRadius={'0'}
-           mr={2}
-           src={modelData?.avatar || HUGGING_FACE_ICON}
-           fallbackSrc={HUGGING_FACE_ICON}
-           w={avatarSize}
-         />
-         <Box>{modelData.name}</Box>
-       </Flex>
-     )
-   };
- });
+       return {
+         value: item.value,
+         label: (
+           <Flex alignItems={'center'} py={1}>
+             <Avatar
+               borderRadius={'0'}
+               mr={2}
+               src={modelData?.avatar || HUGGING_FACE_ICON}
+               fallbackSrc={HUGGING_FACE_ICON}
+               w={avatarSize}
+             />
+             <Box>{modelData.name}</Box>
+           </Flex>
+         )
+       };
+     }),
+   [
+     list,
+     llmModelList,
+     embeddingModelList,
+     ttsModelList,
+     sttModelList,
+     reRankModelList,
+     avatarSize
+   ]
+ );
return (
<Box
@@ -99,6 +111,16 @@ const MultipleRowSelector = ({ list, onchange, disableTip, ...props }: Props) =>
const { t } = useTranslation();
const { llmModelList, embeddingModelList, ttsModelList, sttModelList, reRankModelList } =
useSystemStore();
+ const modelList = useMemo(() => {
+   return [
+     ...llmModelList,
+     ...embeddingModelList,
+     ...ttsModelList,
+     ...sttModelList,
+     ...reRankModelList
+   ];
+ }, [llmModelList, embeddingModelList, ttsModelList, sttModelList, reRankModelList]);
const [value, setValue] = useState<string[]>([]);
const avatarSize = useMemo(() => {
@@ -134,7 +156,7 @@ const MultipleRowSelector = ({ list, onchange, disableTip, ...props }: Props) =>
}));
for (const item of list) {
-     const modelData = getModelFromList([...llmModelList, ...embeddingModelList], item.value);
+     const modelData = getModelFromList(modelList, item.value);
const provider =
renderList.find((item) => item.value === (modelData?.provider || 'Other')) ??
renderList[renderList.length - 1];
@@ -146,7 +168,7 @@ const MultipleRowSelector = ({ list, onchange, disableTip, ...props }: Props) =>
}
return renderList.filter((item) => item.children.length > 0);
- }, [avatarSize, list, llmModelList, t, embeddingModelList]);
+ }, [avatarSize, list, modelList]);
const onSelect = useCallback(
(e: string[]) => {
@@ -156,16 +178,7 @@ const MultipleRowSelector = ({ list, onchange, disableTip, ...props }: Props) =>
);
const SelectedModel = useMemo(() => {
-   const modelData = getModelFromList(
-     [
-       ...llmModelList,
-       ...embeddingModelList,
-       ...ttsModelList,
-       ...sttModelList,
-       ...reRankModelList
-     ],
-     props.value
-   );
+   const modelData = getModelFromList(modelList, props.value);
setValue([modelData.provider, props.value]);
@@ -181,15 +194,7 @@ const MultipleRowSelector = ({ list, onchange, disableTip, ...props }: Props) =>
<Box>{modelData?.name}</Box>
</HStack>
);
- }, [
-   llmModelList,
-   embeddingModelList,
-   ttsModelList,
-   sttModelList,
-   reRankModelList,
-   props.value,
-   avatarSize
- ]);
+ }, [modelList, props.value, avatarSize]);
return (
<Box

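Note: both selector components now build their merged model list inside useMemo. The idea, sketched minimally below (the hook and type names are illustrative, not from the codebase), is to compute the concatenated list once per change of its sources and let downstream memos depend on a single value instead of five separate store slices:

import { useMemo } from 'react';

type ModelItem = { value: string; provider?: string; avatar?: string };

// Illustrative hook: merge several store slices once and reuse the result.
export function useMergedModelList(
  llmModelList: ModelItem[],
  embeddingModelList: ModelItem[],
  ttsModelList: ModelItem[]
) {
  // Recomputed only when one of the source arrays changes identity,
  // so renders that touch unrelated state reuse the same merged array.
  return useMemo(
    () => [...llmModelList, ...embeddingModelList, ...ttsModelList],
    [llmModelList, embeddingModelList, ttsModelList]
  );
}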
View File

@@ -13,11 +13,11 @@ import { useRouter } from 'next/router';
const TeamSelector = ({
showManage,
afterSwitchTeam,
+ onChange,
  ...props
- }: ButtonProps & {
+ }: Omit<ButtonProps, 'onChange'> & {
  showManage?: boolean;
  afterSwitchTeam?: () => void;
+ onChange?: () => void;
}) => {
const { t } = useTranslation();
const router = useRouter();
@@ -38,7 +38,7 @@ const TeamSelector = ({
{
onFinally: () => {
setLoading(false);
-       afterSwitchTeam?.();
+       onChange?.();
},
errorToast: t('common:user.team.Switch Team Failed')
}

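Note: the prop rename works because the component now exposes its own zero-argument onChange, which collides with the DOM onChange handler that Chakra's ButtonProps inherits from the underlying button element; Omit drops the inherited one before re-declaring it. A small sketch of the typing (prop names other than onChange are illustrative):

import type { ButtonProps } from '@chakra-ui/react';

// ButtonProps inherits the DOM form-event onChange, whose signature differs
// from the callback this component wants to expose, so it is omitted first.
type TeamSelectorProps = Omit<ButtonProps, 'onChange'> & {
  showManage?: boolean;
  onChange?: () => void; // called after the team switch finishes
};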
View File

@@ -142,19 +142,36 @@ const NodeCard = (props: Props) => {
const { runAsync: onClickSyncVersion } = useRequest2(
async () => {
-     if (!node?.pluginId) return;
-     const template = await getPreviewPluginNode({ appId: node.pluginId });
+     if (!node) return;
-     if (!!template) {
+     if (node.pluginId) {
+       const template = await getPreviewPluginNode({ appId: node.pluginId });
+       if (!!template) {
          onResetNode({
            id: nodeId,
            node: template
          });
        }
+     } else {
+       const template = moduleTemplatesFlat.find(
+         (item) => item.flowNodeType === node.flowNodeType
+       );
+       if (!template) {
+         return toast({
+           title: t('app:app.modules.not_found_tips'),
+           status: 'warning'
+         });
+       }
+       onResetNode({
+         id: nodeId,
+         node: template
+       });
+     }
      onCloseConfirmSync();
    },
    {
-     refreshDeps: [node, nodeId, onResetNode]
+     refreshDeps: [node, nodeId, onResetNode],
+     onFinally() {}
}
);
@@ -311,7 +328,6 @@ const NodeCard = (props: Props) => {
</Box>
)}
<MenuRender nodeId={nodeId} menuForbid={menuForbid} nodeList={nodeList} />
-       <ConfirmSyncModal />
</Box>
);
}, [
@@ -335,7 +351,6 @@ const NodeCard = (props: Props) => {
intro,
menuForbid,
nodeList,
-   ConfirmSyncModal,
onChangeNode,
onOpenCustomTitleModal,
toast

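Note: this is the fix for "workflow basic node cannot upgrade". The old handler returned early when a node had no pluginId, so built-in nodes could never be synced to their latest template; the new branch falls back to the locally registered templates. Roughly, the decision looks like this (a sketch only, reusing getPreviewPluginNode and moduleTemplatesFlat from the diff above):

// Sketch: plugin nodes refresh from the API, basic nodes from the local
// template registry, matched by flowNodeType.
async function resolveLatestTemplate(node: { pluginId?: string; flowNodeType: string }) {
  if (node.pluginId) {
    return await getPreviewPluginNode({ appId: node.pluginId });
  }
  return moduleTemplatesFlat.find((item) => item.flowNodeType === node.flowNodeType);
}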
View File

@@ -7,7 +7,7 @@ import MyIcon from '@fastgpt/web/components/common/Icon';
import { useTranslation } from 'next-i18next';
import React, { DragEvent, useCallback, useMemo, useState } from 'react';
import { getNanoid } from '@fastgpt/global/common/string/tools';
- import { useRequest } from '@fastgpt/web/hooks/useRequest';
+ import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import { getFileIcon } from '@fastgpt/global/common/file/icon';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { uploadFile2DB } from '@/web/common/file/controller';
@@ -66,9 +66,50 @@ const FileSelector = ({
'i'
);
- const { mutate: onSelectFile, isLoading } = useRequest({
-   mutationFn: async (files: SelectFileItemType[]) => {
+ const { runAsync: onSelectFile, loading: isLoading } = useRequest2(
+   async (files: SelectFileItemType[]) => {
+     {
+       await Promise.all(
+         files.map(async ({ fileId, file }) => {
+           const { fileId: uploadFileId } = await uploadFile2DB({
+             file,
+             bucketName: BucketNameEnum.dataset,
+             data: {
+               datasetId
+             },
+             percentListen: (e) => {
+               setSelectFiles((state) =>
+                 state.map((item) =>
+                   item.id === fileId
+                     ? {
+                         ...item,
+                         uploadedFileRate: item.uploadedFileRate
+                           ? Math.max(e, item.uploadedFileRate)
+                           : e
+                       }
+                     : item
+                 )
+               );
+             }
+           });
+           setSelectFiles((state) =>
+             state.map((item) =>
+               item.id === fileId
+                 ? {
+                     ...item,
+                     dbFileId: uploadFileId,
+                     isUploading: false,
+                     uploadedFileRate: 100
+                   }
+                 : item
+             )
+           );
+         })
+       );
+     }
+   },
+   {
+     onBefore([files]) {
onStartSelect();
setSelectFiles((state) => {
const formatFiles = files.map<ImportSourceItemType>((selectFile) => {
@@ -88,52 +129,12 @@ const FileSelector = ({
const results = formatFiles.concat(state).slice(0, maxCount);
return results;
});
-     try {
-       // upload file
-       await Promise.all(
-         files.map(async ({ fileId, file }) => {
-           const { fileId: uploadFileId } = await uploadFile2DB({
-             file,
-             bucketName: BucketNameEnum.dataset,
-             data: {
-               datasetId
-             },
-             percentListen: (e) => {
-               setSelectFiles((state) =>
-                 state.map((item) =>
-                   item.id === fileId
-                     ? {
-                         ...item,
-                         uploadedFileRate: item.uploadedFileRate
-                           ? Math.max(e, item.uploadedFileRate)
-                           : e
-                       }
-                     : item
-                 )
-               );
-             }
-           });
-           setSelectFiles((state) =>
-             state.map((item) =>
-               item.id === fileId
-                 ? {
-                     ...item,
-                     dbFileId: uploadFileId,
-                     isUploading: false,
-                     uploadedFileRate: 100
-                   }
-                 : item
-             )
-           );
-         })
-       );
-     } catch (error) {
-       console.log(error);
-     }
    },
    onFinally() {
      onFinishSelect();
    }
  }
- });
+ );
const selectFileCallback = useCallback(
(files: SelectFileItemType[]) => {

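Note: the upload flow moves its setup and cleanup out of the request body and into lifecycle options, which is what makes the old try/catch unnecessary. A minimal sketch of that pattern written against the public ahooks useRequest API (useRequest2 is FastGPT's wrapper around it); uploadOne, onStart, and onDone are placeholders:

import { useRequest } from 'ahooks';

// Placeholder for the real upload call (uploadFile2DB in the diff above).
async function uploadOne(file: File): Promise<void> {}

export function useFileUpload(onStart: () => void, onDone: () => void) {
  const { runAsync, loading } = useRequest(
    async (files: File[]) => {
      // Only the uploads live here; the hook surfaces errors itself,
      // so no manual try/catch is needed.
      await Promise.all(files.map((file) => uploadOne(file)));
    },
    {
      manual: true,
      onBefore: () => onStart(), // runs before the request starts
      onFinally: () => onDone() // runs on both success and failure
    }
  );

  return { upload: runAsync, uploading: loading };
}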
View File

@@ -284,7 +284,7 @@ const MyInfo = ({ onOpenContact }: { onOpenContact: () => void }) => {
<Flex mt={6} alignItems={'center'}>
<Box {...labelStyles}>{t('account_info:user_team_team_name')}:&nbsp;</Box>
<Flex flex={'1 0 0'} w={0} align={'center'}>
-         <TeamSelector height={'28px'} w={'100%'} showManage afterSwitchTeam={initUserInfo} />
+         <TeamSelector height={'28px'} w={'100%'} showManage onChange={initUserInfo} />
</Flex>
</Flex>
)}

View File

@@ -88,7 +88,7 @@ const Team = () => {
</Box>
</Flex>
<Flex align={'center'} ml={6}>
-       <TeamSelector height={'28px'} afterSwitchTeam={refetchMembers} />
+       <TeamSelector height={'28px'} onChange={refetchMembers} />
</Flex>
{userInfo?.team?.role === TeamMemberRoleEnum.owner && (
<Flex align={'center'} justify={'center'} ml={2} p={'0.44rem'}>

View File

@@ -44,12 +44,22 @@ async function handler(
defaultModels: global.systemDefaultModel
};
} catch (error) {
+   const referer = req.headers.referer;
+   if (referer?.includes('/price')) {
+     return {
+       feConfigs: global.feConfigs,
+       subPlans: global.subPlans,
+       activeModelList
+     };
+   }
const unAuthBufferId = global.systemInitBufferId ? `unAuth_${global.systemInitBufferId}` : '';
if (bufferId && unAuthBufferId === bufferId) {
return {
bufferId: unAuthBufferId
};
}
return {
bufferId: unAuthBufferId,
feConfigs: global.feConfigs

View File

@@ -5,7 +5,7 @@ import {
SseResponseEventEnum
} from '@fastgpt/global/core/workflow/runtime/constants';
import { responseWrite } from '@fastgpt/service/common/response';
- import { pushChatUsage } from '@/service/support/wallet/usage/push';
+ import { createChatUsage } from '@fastgpt/service/support/wallet/usage/controller';
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
import type { AIChatItemType, UserChatItemType } from '@fastgpt/global/core/chat/type';
import { authApp } from '@fastgpt/service/support/permission/app/auth';
@@ -244,7 +244,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
}
}
-   pushChatUsage({
+   createChatUsage({
appName,
appId,
teamId,

View File

@@ -1,5 +1,5 @@
import type { NextApiRequest, NextApiResponse } from 'next';
- import { pushChatUsage } from '@/service/support/wallet/usage/push';
+ import { createChatUsage } from '@fastgpt/service/support/wallet/usage/controller';
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
import { authApp } from '@fastgpt/service/support/permission/app/auth';
import { dispatchWorkFlow } from '@fastgpt/service/core/workflow/dispatch';
@@ -66,8 +66,8 @@ async function handler(
maxRunTimes: WORKFLOW_MAX_RUN_TIMES
});
-   pushChatUsage({
-     appName: '工作流Debug',
+   createChatUsage({
+     appName: `${app.name}-Debug`,
appId,
teamId,
tmbId,

View File

@@ -20,7 +20,7 @@ import { GPTMessages2Chats, chatValue2RuntimePrompt } from '@fastgpt/global/core
import { getChatItems } from '@fastgpt/service/core/chat/controller';
import { saveChat, updateInteractiveChat } from '@fastgpt/service/core/chat/saveChat';
import { responseWrite } from '@fastgpt/service/common/response';
- import { pushChatUsage } from '@/service/support/wallet/usage/push';
+ import { createChatUsage } from '@fastgpt/service/support/wallet/usage/controller';
import { authOutLinkChatStart } from '@/service/support/permission/auth/outLink';
import { pushResult2Remote, addOutLinkUsage } from '@fastgpt/service/support/outLink/tools';
import requestIp from 'request-ip';
@@ -423,7 +423,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
}
// add record
-   const { totalPoints } = pushChatUsage({
+   const { totalPoints } = createChatUsage({
appName: app.name,
appId: app._id,
teamId,

View File

@@ -67,9 +67,7 @@ const Login = ({ ChineseRedirectUrl }: { ChineseRedirectUrl: string }) => {
      // Check whether this is the current route
const navigateTo =
decodeLastRoute && !decodeLastRoute.includes('/login') ? decodeLastRoute : '/app/list';
-       setTimeout(() => {
-         router.push(navigateTo);
-       }, 300);
+       router.push(navigateTo);
},
[setUserInfo, lastRoute, router]
);

View File

@@ -1,5 +1,5 @@
import { getUserChatInfoAndAuthTeamPoints } from '@fastgpt/service/support/permission/auth/team';
- import { pushChatUsage } from '@/service/support/wallet/usage/push';
+ import { createChatUsage } from '@fastgpt/service/support/wallet/usage/controller';
import { getNextTimeByCronStringAndTimezone } from '@fastgpt/global/common/string/time';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { delay, retryFn } from '@fastgpt/global/common/system/utils';
@@ -113,7 +113,7 @@ export const getScheduleTriggerApp = async () => {
}
]
});
-     pushChatUsage({
+     createChatUsage({
appName: app.name,
appId: app._id,
teamId: String(app.teamId),

View File

@@ -1,27 +0,0 @@
- import { ConcatUsageProps, CreateUsageProps } from '@fastgpt/global/support/wallet/usage/api';
- import { addLog } from '@fastgpt/service/common/system/log';
- import { POST } from '@fastgpt/service/common/api/plusRequest';
- import { FastGPTProUrl } from '@fastgpt/service/common/system/constants';
-
- export async function createUsage(data: CreateUsageProps) {
-   if (!FastGPTProUrl) return;
-   if (data.totalPoints === 0) {
-     addLog.info('0 totalPoints', data);
-   }
-   try {
-     await POST('/support/wallet/usage/createUsage', data);
-   } catch (error) {
-     addLog.error('createUsage error', error);
-   }
- }
-
- export async function concatUsage(data: ConcatUsageProps) {
-   if (!FastGPTProUrl) return;
-   if (data.totalPoints === 0) {
-     addLog.info('0 totalPoints', data);
-   }
-   try {
-     await POST('/support/wallet/usage/concatUsage', data);
-   } catch (error) {
-     addLog.error('concatUsage error', error);
-   }
- }

View File

@@ -1,55 +1,10 @@
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
import { addLog } from '@fastgpt/service/common/system/log';
- import { createUsage, concatUsage } from './controller';
+ import { createUsage, concatUsage } from '@fastgpt/service/support/wallet/usage/controller';
import { formatModelChars2Points } from '@fastgpt/service/support/wallet/usage/utils';
import { ChatNodeUsageType } from '@fastgpt/global/support/wallet/bill/type';
import { i18nT } from '@fastgpt/web/i18n/utils';
import { ModelTypeEnum } from '@fastgpt/global/core/ai/model';
import { getDefaultTTSModel } from '@fastgpt/service/core/ai/model';
- export const pushChatUsage = ({
-   appName,
-   appId,
-   pluginId,
-   teamId,
-   tmbId,
-   source,
-   flowUsages
- }: {
-   appName: string;
-   appId?: string;
-   pluginId?: string;
-   teamId: string;
-   tmbId: string;
-   source: UsageSourceEnum;
-   flowUsages: ChatNodeUsageType[];
- }) => {
-   const totalPoints = flowUsages.reduce((sum, item) => sum + (item.totalPoints || 0), 0);
-   createUsage({
-     teamId,
-     tmbId,
-     appName,
-     appId,
-     pluginId,
-     totalPoints,
-     source,
-     list: flowUsages.map((item) => ({
-       moduleName: item.moduleName,
-       amount: item.totalPoints || 0,
-       model: item.model,
-       inputTokens: item.inputTokens,
-       outputTokens: item.outputTokens
-     }))
-   });
-   addLog.info(`finish completions`, {
-     source,
-     teamId,
-     totalPoints
-   });
-   return { totalPoints };
- };
export const pushQAUsage = async ({
teamId,
tmbId,

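Note: with pushChatUsage deleted, callers now use createChatUsage from @fastgpt/service. Based purely on the removed code above, the aggregation it replaces looks roughly like this (a sketch of the old logic, not the service implementation):

type ChatNodeUsage = {
  moduleName: string;
  totalPoints?: number;
  model?: string;
  inputTokens?: number;
  outputTokens?: number;
};

// Mirrors the deleted pushChatUsage: sum per-node points and flatten the
// node usages into one billing record.
function aggregateChatUsage(flowUsages: ChatNodeUsage[]) {
  const totalPoints = flowUsages.reduce((sum, item) => sum + (item.totalPoints || 0), 0);
  const list = flowUsages.map((item) => ({
    moduleName: item.moduleName,
    amount: item.totalPoints || 0,
    model: item.model,
    inputTokens: item.inputTokens,
    outputTokens: item.outputTokens
  }));
  return { totalPoints, list };
}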
View File

@@ -31,7 +31,7 @@ export const uploadFile2DB = ({
if (!e.total) return;
const percent = Math.round((e.loaded / e.total) * 100);
-     percentListen && percentListen(percent);
+     percentListen?.(percent);
});
};
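
Note: the last hunk replaces the truthiness guard with an optional call; the two styles below are equivalent for an optional callback:

type ProgressListener = (percent: number) => void;

function report(percent: number, percentListen?: ProgressListener) {
  // Old style: guard with && before calling.
  // percentListen && percentListen(percent);
  // New style: optional call expression, a no-op when percentListen is undefined.
  percentListen?.(percent);
}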