diff --git a/docSite/content/zh-cn/docs/development/modelConfig/intro.md b/docSite/content/zh-cn/docs/development/modelConfig/intro.md
index 39877413f..26d8a86da 100644
--- a/docSite/content/zh-cn/docs/development/modelConfig/intro.md
+++ b/docSite/content/zh-cn/docs/development/modelConfig/intro.md
@@ -43,7 +43,8 @@ weight: 744
 {{% alert icon="🤖 " context="success" %}}
 注意:
 1. 目前语音识别模型和重排模型仅会生效一个,所以配置时候,只需要配置一个即可。
-2. 用于知识库文件处理的语言模型,至少需要开启一个,否则知识库会报错。
+2. 系统必须至少有一个语言模型和一个索引模型才能正常使用。
+3. 使用知识库功能,至少要有一个语言模型,用于知识库文件处理(可以在模型配置时候打开该开关),否则知识库会报错。
 {{% /alert %}}

 #### 核心配置
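The note above now requires at least one language model and one index (embedding) model, plus one LLM with the dataset file-processing switch enabled. A minimal sketch of those three checks, assuming the field names this PR reads from `useSystemStore` in `_error.tsx` (`llmModelList`, `embeddingModelList`, `datasetProcess`); this is not the authoritative config schema:

```ts
// Sketch only: the conditions the docs describe, using field names taken from
// the _error.tsx hunk later in this PR. Real validation lives elsewhere.
type LLMModelItem = { model: string; datasetProcess?: boolean };
type EmbeddingModelItem = { model: string };

function checkModelConfig(llmModelList: LLMModelItem[], embeddingModelList: EmbeddingModelItem[]) {
  if (llmModelList.length === 0) throw new Error('At least one language model is required');
  if (embeddingModelList.length === 0) throw new Error('At least one index (embedding) model is required');
  // The dataset feature additionally needs an LLM with the file-processing switch turned on.
  if (!llmModelList.some((m) => m.datasetProcess)) {
    throw new Error('Enable dataset file processing on at least one language model');
  }
}
```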
diff --git a/docSite/content/zh-cn/docs/development/upgrading/4821.md b/docSite/content/zh-cn/docs/development/upgrading/4821.md
index fc2ad35b3..509694135 100644
--- a/docSite/content/zh-cn/docs/development/upgrading/4821.md
+++ b/docSite/content/zh-cn/docs/development/upgrading/4821.md
@@ -12,15 +12,17 @@ weight: 804
 ## 完整更新内容

 1. 新增 - 弃用/已删除的插件提示。
-2. 新增 - LLM 模型支持 top_p, response_format, json_schema 参数。
-3. 新增 - Doubao1.5 模型预设。
-4. 新增 - 向量模型支持归一化配置,以便适配未归一化的向量模型,例如 Doubao 的 embedding 模型。
-5. 新增 - AI 对话节点,支持输出思考过程结果,可用于其他节点引用。
-6. 优化 - 模型未配置时错误提示。
-7. 优化 - 适配非 Stream 模式思考输出。
-8. 优化 - 增加 TTS voice 未配置时的空指针保护。
-9. 优化 - Markdown 链接解析分割规则,改成严格匹配模式,牺牲兼容多种情况,减少误解析。
-10. 修复 - 简易模式,切换到其他非视觉模型时候,会强制关闭图片识别。
-11. 修复 - o1,o3 模型,在测试时候字段映射未生效导致报错。
-12. 修复 - 公众号对话空指针异常。
-13. 修复 - 多个音频/视频文件展示异常。
\ No newline at end of file
+2. 新增 - 对话日志按来源分类、标题检索、导出功能。
+3. 新增 - LLM 模型支持 top_p, response_format, json_schema 参数。
+4. 新增 - Doubao1.5 模型预设。
+5. 新增 - 向量模型支持归一化配置,以便适配未归一化的向量模型,例如 Doubao 的 embedding 模型。
+6. 新增 - AI 对话节点,支持输出思考过程结果,可用于其他节点引用。
+7. 优化 - 模型未配置时错误提示。
+8. 优化 - 适配非 Stream 模式思考输出。
+9. 优化 - 增加 TTS voice 未配置时的空指针保护。
+10. 优化 - Markdown 链接解析分割规则,改成严格匹配模式,牺牲兼容多种情况,减少误解析。
+11. 修复 - 简易模式,切换到其他非视觉模型时候,会强制关闭图片识别。
+12. 修复 - o1,o3 模型,在测试时候字段映射未生效导致报错。
+13. 修复 - 公众号对话空指针异常。
+14. 修复 - 多个音频/视频文件展示异常。
+15. 修复 - 分享链接鉴权报错后无限循环。
\ No newline at end of file
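Changelog item 5 above adds a normalization option for embedding models whose vectors are not unit length (for example Doubao's embedding model); cosine-style similarity and vector indexes generally assume unit vectors, so un-normalized outputs are divided by their L2 norm. A small sketch of that transform (FastGPT's actual implementation may differ):

```ts
// Plain L2 normalization, as described by changelog item 5. Illustrative only.
function l2Normalize(vector: number[]): number[] {
  const norm = Math.sqrt(vector.reduce((sum, v) => sum + v * v, 0));
  if (norm === 0) return vector; // leave an all-zero vector unchanged
  return vector.map((v) => v / norm);
}

// Example: l2Normalize([3, 4]) -> [0.6, 0.8], a vector of length exactly 1.
```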
diff --git a/packages/service/core/chat/chatSchema.ts b/packages/service/core/chat/chatSchema.ts
index 53a6569ee..b4a0afe62 100644
--- a/packages/service/core/chat/chatSchema.ts
+++ b/packages/service/core/chat/chatSchema.ts
@@ -88,7 +88,7 @@ try {
   ChatSchema.index({ appId: 1, chatId: 1 });

   // get chat logs;
-  ChatSchema.index({ teamId: 1, appId: 1, updateTime: -1 });
+  ChatSchema.index({ teamId: 1, appId: 1, updateTime: -1, sources: 1 });

   // get share chat history
   ChatSchema.index({ shareId: 1, outLinkUid: 1, updateTime: -1 });
diff --git a/packages/service/core/dataset/training/utils.ts b/packages/service/core/dataset/training/utils.ts
index 35ff6aabd..484eac0b6 100644
--- a/packages/service/core/dataset/training/utils.ts
+++ b/packages/service/core/dataset/training/utils.ts
@@ -1,45 +1,5 @@
-import { DatasetTrainingSchemaType } from '@fastgpt/global/core/dataset/type';
-import { addLog } from '../../../common/system/log';
-import { getErrText } from '@fastgpt/global/common/error/utils';
-import { MongoDatasetTraining } from './schema';
 import Papa from 'papaparse';

-export const checkInvalidChunkAndLock = async ({
-  err,
-  errText,
-  data
-}: {
-  err: any;
-  errText: string;
-  data: DatasetTrainingSchemaType;
-}) => {
-  if (err?.response) {
-    addLog.error(`openai error: ${errText}`, {
-      status: err.response?.status,
-      statusText: err.response?.statusText,
-      data: err.response?.data
-    });
-  } else {
-    addLog.error(getErrText(err, errText), err);
-  }
-
-  if (
-    err?.message === 'invalid message format' ||
-    err?.type === 'invalid_request_error' ||
-    err?.code === 500
-  ) {
-    addLog.error('Lock training data', err);
-
-    try {
-      await MongoDatasetTraining.findByIdAndUpdate(data._id, {
-        lockTime: new Date('2998/5/5')
-      });
-    } catch (error) {}
-    return true;
-  }
-  return false;
-};
-
 export const parseCsvTable2Chunks = (rawText: string) => {
   const csvArr = Papa.parse(rawText).data as string[][];
diff --git a/packages/web/hooks/usePagination.tsx b/packages/web/hooks/usePagination.tsx
index 5e65d11f3..a8e2a559d 100644
--- a/packages/web/hooks/usePagination.tsx
+++ b/packages/web/hooks/usePagination.tsx
@@ -53,7 +53,7 @@ export function usePagination(
   const isEmpty = total === 0 && !isLoading;
   const noMore = data.length >= totalDataLength;

-  const fetchData = useLockFn(
+  const fetchData = useMemoizedFn(
     async (num: number = pageNum, ScrollContainerRef?: RefObject) => {
       if (noMore && num !== 1) return;
       setTrue();
@@ -99,11 +99,12 @@

       onChange?.(num);
     } catch (error: any) {
-      toast({
-        title: getErrText(error, t('common:core.chat.error.data_error')),
-        status: 'error'
-      });
-      console.log(error);
+      if (error.code !== 'ERR_CANCELED') {
+        toast({
+          title: getErrText(error, t('common:core.chat.error.data_error')),
+          status: 'error'
+        });
+      }
     }

     setFalse();
@@ -246,7 +247,6 @@
   // Reload data
   const { runAsync: refresh } = useRequest(
     async () => {
-      setData([]);
       defaultRequest && fetchData(1);
     },
     {
diff --git a/projects/app/src/pageComponents/app/detail/Logs/index.tsx b/projects/app/src/pageComponents/app/detail/Logs/index.tsx
index e84440ae0..f62fa7b28 100644
--- a/projects/app/src/pageComponents/app/detail/Logs/index.tsx
+++ b/projects/app/src/pageComponents/app/detail/Logs/index.tsx
@@ -1,4 +1,4 @@
-import React, { useEffect, useMemo, useState } from 'react';
+import React, { useMemo, useState } from 'react';
 import {
   Flex,
   Box,
@@ -35,7 +35,6 @@ import SearchInput from '@fastgpt/web/components/common/Input/SearchInput';
 import PopoverConfirm from '@fastgpt/web/components/common/MyPopover/PopoverConfirm';
 import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
 import { downloadFetch } from '@/web/common/system/utils';
-import { debounce } from 'lodash';

 const DetailLogsModal = dynamic(() => import('./DetailLogsModal'));

@@ -51,15 +50,6 @@ const Logs = () => {
   const [detailLogsId, setDetailLogsId] = useState();

   const [logTitle, setLogTitle] = useState();
-  const [inputValue, setInputValue] = useState('');
-
-  useEffect(() => {
-    const timer = setTimeout(() => {
-      setLogTitle(inputValue);
-    }, 500);
-
-    return () => clearTimeout(timer);
-  }, [inputValue]);

   const {
     value: chatSources,
@@ -172,8 +162,8 @@
-          value={inputValue}
-          onChange={(e) => setInputValue(e.target.value)}
+          value={logTitle}
+          onChange={(e) => setLogTitle(e.target.value)}
         />
diff --git a/projects/app/src/pages/_error.tsx b/projects/app/src/pages/_error.tsx
index e5f97e910..b7e91c69b 100644
--- a/projects/app/src/pages/_error.tsx
+++ b/projects/app/src/pages/_error.tsx
@@ -4,10 +4,12 @@ import { serviceSideProps } from '@fastgpt/web/common/system/nextjs';
 import { useSystemStore } from '@/web/common/system/useSystemStore';
 import { Box } from '@chakra-ui/react';
 import { TrackEventName } from '@/web/common/system/constants';
+import { useToast } from '@fastgpt/web/hooks/useToast';

 function Error() {
   const router = useRouter();
-  const { lastRoute } = useSystemStore();
+  const { toast } = useToast();
+  const { lastRoute, llmModelList, embeddingModelList } = useSystemStore();

   useEffect(() => {
     setTimeout(() => {
@@ -20,8 +22,34 @@
       });
     }, 1000);

+    let modelError = false;
+    if (llmModelList.length === 0) {
+      modelError = true;
+      toast({
+        title: '未配置语言模型',
+        status: 'error'
+      });
+    } else if (!llmModelList.some((item) => item.datasetProcess)) {
+      modelError = true;
+      toast({
+        title: '未配置知识库文件处理模型',
+        status: 'error'
+      });
+    }
+    if (embeddingModelList.length === 0) {
+      modelError = true;
+      toast({
+        title: '未配置索引模型',
+        status: 'error'
+      });
+    }
+
     setTimeout(() => {
-      router.back();
+      if (modelError) {
+        router.push('/account/model');
+      } else {
+        router.push('/app/list');
+      }
     }, 2000);
   }, []);

diff --git a/projects/app/src/pages/api/core/app/exportChatLogs.ts b/projects/app/src/pages/api/core/app/exportChatLogs.ts
index d561431c6..94fe0edee 100644
--- a/projects/app/src/pages/api/core/app/exportChatLogs.ts
+++ b/projects/app/src/pages/api/core/app/exportChatLogs.ts
@@ -227,12 +227,7 @@ async function handler(req: ApiRequestProps, res: NextAp
         });
       }
     );
-    let chatDetailsStr = '';
-    try {
-      chatDetailsStr = JSON.stringify(chatDetails).replace(/"/g, '""').replace(/\n/g, '\\n');
-    } catch (e) {
-      addLog.error(`export chat logs error`, e);
-    }
+    let chatDetailsStr = JSON.stringify(chatDetails).replace(/"/g, '""').replace(/\n/g, '\\n');

     const res = `\n"${time}","${source}","${tmb}","${title}","${messageCount}","${userFeedbackCount}","${customFeedbacksCount}","${markCount}","${chatDetailsStr}"`;
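The export hunk above drops the defensive try/catch and inlines the CSV escaping: inner double quotes are doubled and newlines flattened before the JSON string is wrapped in quotes. The same rule in isolation (the helper name is mine, not from the codebase):

```ts
// Hypothetical helper mirroring the escaping used in exportChatLogs.ts.
function toCsvField(value: unknown): string {
  const raw = typeof value === 'string' ? value : JSON.stringify(value);
  // Double internal quotes (RFC 4180 style) and keep each record on a single line.
  return `"${raw.replace(/"/g, '""').replace(/\n/g, '\\n')}"`;
}

// toCsvField('a "quoted" word') -> '"a ""quoted"" word"'
```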
diff --git a/projects/app/src/pages/api/core/chat/outLink/init.ts b/projects/app/src/pages/api/core/chat/outLink/init.ts
index a758a3411..2c167880b 100644
--- a/projects/app/src/pages/api/core/chat/outLink/init.ts
+++ b/projects/app/src/pages/api/core/chat/outLink/init.ts
@@ -1,6 +1,5 @@
 import type { NextApiRequest, NextApiResponse } from 'next';
-import { jsonRes } from '@fastgpt/service/common/response';
-import type { InitChatResponse, InitOutLinkChatProps } from '@/global/core/chat/api.d';
+import type { InitOutLinkChatProps } from '@/global/core/chat/api.d';
 import { getGuideModule, getAppChatConfig } from '@fastgpt/global/core/workflow/utils';
 import { authOutLink } from '@/service/support/permission/auth/outLink';
 import { MongoApp } from '@fastgpt/service/core/app/schema';
@@ -39,29 +38,27 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
     nodes?.find((node) => node.flowNodeType === FlowNodeTypeEnum.pluginInput)?.inputs ?? [];

-  jsonRes(res, {
-    data: {
-      chatId,
-      appId: app._id,
-      title: chat?.title,
-      userAvatar: getRandomUserAvatar(),
-      variables: chat?.variables,
-      app: {
-        chatConfig: getAppChatConfig({
-          chatConfig,
-          systemConfigNode: getGuideModule(nodes),
-          storeVariables: chat?.variableList,
-          storeWelcomeText: chat?.welcomeText,
-          isPublicFetch: false
-        }),
-        name: app.name,
-        avatar: app.avatar,
-        intro: app.intro,
-        type: app.type,
-        pluginInputs
-      }
+  return {
+    chatId,
+    appId: app._id,
+    title: chat?.title,
+    userAvatar: getRandomUserAvatar(),
+    variables: chat?.variables,
+    app: {
+      chatConfig: getAppChatConfig({
+        chatConfig,
+        systemConfigNode: getGuideModule(nodes),
+        storeVariables: chat?.variableList,
+        storeWelcomeText: chat?.welcomeText,
+        isPublicFetch: false
+      }),
+      name: app.name,
+      avatar: app.avatar,
+      intro: app.intro,
+      type: app.type,
+      pluginInputs
     }
-  });
+  };
 }

 export default NextAPI(handler);
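The init handler above no longer calls `jsonRes` itself; it simply returns the payload and relies on the `NextAPI` wrapper to serialize it. The wrapper's real implementation is not shown in this diff, so the following is only a rough sketch of the contract the refactor assumes (names other than `NextAPI` are mine):

```ts
// Sketch of a handler wrapper with the behaviour the refactor relies on:
// whatever the wrapped handler returns becomes the `data` field of the JSON response.
import type { NextApiRequest, NextApiResponse } from 'next';

type ApiHandler<T> = (req: NextApiRequest, res: NextApiResponse) => Promise<T>;

function wrapApi<T>(handler: ApiHandler<T>) {
  return async (req: NextApiRequest, res: NextApiResponse) => {
    try {
      const data = await handler(req, res);
      res.status(200).json({ code: 200, data });
    } catch (error: any) {
      res.status(500).json({ code: 500, message: error?.message ?? 'unknown error' });
    }
  };
}
```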
diff --git a/projects/app/src/pages/api/core/dataset/training/rebuildEmbedding.ts b/projects/app/src/pages/api/core/dataset/training/rebuildEmbedding.ts
index 1ea953595..063ae04a1 100644
--- a/projects/app/src/pages/api/core/dataset/training/rebuildEmbedding.ts
+++ b/projects/app/src/pages/api/core/dataset/training/rebuildEmbedding.ts
@@ -117,7 +117,8 @@ async function handler(req: ApiRequestProps): Promise {
       {
         manual: false,
         refreshDeps: [shareId, outLinkAuthData, chatId],
-        onError(e: any) {
-          if (chatId) {
-            onChangeChatId('');
-          }
-        },
         onFinally() {
           forbidLoadChat.current = false;
         }
@@ -333,11 +328,11 @@
     return () => {
       setOutLinkAuthData({});
     };
-  }, [chatHistoryProviderParams.outLinkUid, setOutLinkAuthData, shareId]);
+  }, [chatHistoryProviderParams.outLinkUid, shareId]);

   // Watch appId
   useEffect(() => {
     setAppId(appId);
-  }, [appId, setAppId]);
+  }, [appId]);

   return source === ChatSourceEnum.share ? (
diff --git a/projects/app/src/service/events/generateQA.ts b/projects/app/src/service/events/generateQA.ts
index fc213f83f..755fc3607 100644
--- a/projects/app/src/service/events/generateQA.ts
+++ b/projects/app/src/service/events/generateQA.ts
@@ -10,7 +10,6 @@ import { Prompt_AgentQA } from '@fastgpt/global/core/ai/prompt/agent';
 import type { PushDatasetDataChunkProps } from '@fastgpt/global/core/dataset/api.d';
 import { getLLMModel } from '@fastgpt/service/core/ai/model';
 import { checkTeamAiPointsAndLock } from './utils';
-import { checkInvalidChunkAndLock } from '@fastgpt/service/core/dataset/training/utils';
 import { addMinutes } from 'date-fns';
 import {
   countGptMessagesTokens,
@@ -168,13 +167,9 @@ ${replaceVariable(Prompt_AgentQA.fixedText, { text })}`;
       reduceQueue();
       generateQA();
     } catch (err: any) {
-      addLog.error(`[QA Queue] Error`);
+      addLog.error(`[QA Queue] Error`, err);
       reduceQueue();

-      if (await checkInvalidChunkAndLock({ err, data, errText: 'QA模型调用失败' })) {
-        return generateQA();
-      }
-
       setTimeout(() => {
         generateQA();
       }, 1000);
diff --git a/projects/app/src/service/events/generateVector.ts b/projects/app/src/service/events/generateVector.ts
index 92ea0451c..22f35d6b9 100644
--- a/projects/app/src/service/events/generateVector.ts
+++ b/projects/app/src/service/events/generateVector.ts
@@ -3,7 +3,6 @@ import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/sch
 import { TrainingModeEnum } from '@fastgpt/global/core/dataset/constants';
 import { pushGenerateVectorUsage } from '@/service/support/wallet/usage/push';
 import { checkTeamAiPointsAndLock } from './utils';
-import { checkInvalidChunkAndLock } from '@fastgpt/service/core/dataset/training/utils';
 import { addMinutes } from 'date-fns';
 import { addLog } from '@fastgpt/service/common/system/log';
 import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
@@ -126,10 +125,6 @@ export async function generateVector(): Promise {
     addLog.error(`[Vector Queue] Error`, err);
     reduceQueue();

-    if (await checkInvalidChunkAndLock({ err, data, errText: '向量模型调用失败' })) {
-      return generateVector();
-    }
-
     setTimeout(() => {
       generateVector();
     }, 1000);
@@ -193,7 +188,8 @@ const rebuildData = async ({
           billId: trainingData.billId,
           mode: TrainingModeEnum.chunk,
           model: trainingData.model,
-          dataId: newRebuildingData._id
+          dataId: newRebuildingData._id,
+          retryCount: 50
         }
       ],
       { session }
diff --git a/projects/app/src/service/support/permission/auth/chat.ts b/projects/app/src/service/support/permission/auth/chat.ts
index 140096fce..121d10742 100644
--- a/projects/app/src/service/support/permission/auth/chat.ts
+++ b/projects/app/src/service/support/permission/auth/chat.ts
@@ -63,8 +63,8 @@ export async function authChatCrud({
       authType: AuthUserTypeEnum.teamDomain
     };

-  const chat = await MongoChat.findOne({ appId, chatId, outLinkUid: uid }).lean();
-  if (!chat)
+  const chat = await MongoChat.findOne({ appId, chatId }).lean();
+  if (!chat) {
     return {
       teamId: spaceTeamId,
       tmbId,
@@ -72,6 +72,9 @@
       ...defaultResponseShow,
       authType: AuthUserTypeEnum.teamDomain
     };
+  }
+
+  if (chat.outLinkUid !== uid) return Promise.reject(ChatErrEnum.unAuthChat);

   return {
     teamId: spaceTeamId,
@@ -104,7 +107,8 @@
     };
   }

-  const chat = await MongoChat.findOne({ appId, chatId, outLinkUid: uid }).lean();
+  const chat = await MongoChat.findOne({ appId, chatId }).lean();
+
   if (!chat) {
     return {
       teamId: String(outLinkConfig.teamId),
@@ -116,6 +120,7 @@ async function authChatCrud({
       authType: AuthUserTypeEnum.outLink
     };
   }
+  if (chat.outLinkUid !== uid) return Promise.reject(ChatErrEnum.unAuthChat);
   return {
     teamId: String(outLinkConfig.teamId),
     tmbId: String(outLinkConfig.tmbId),
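The auth change above loads the chat by `appId`/`chatId` alone and only then compares `outLinkUid`, so a uid mismatch becomes an explicit rejection instead of looking like a missing chat (which previously fell through to the permissive default and could loop a share page on auth errors; see changelog item 15). A condensed, hypothetical restatement of that branch, where `findChat`, `defaultResponse` and `unAuthError` stand in for the real `MongoChat` query, `defaultResponseShow` and `ChatErrEnum.unAuthChat`:

```ts
// Not the real authChatCrud: just the share-link decision order introduced above.
type ShareChat = { outLinkUid: string } | null;

async function authShareChat(opts: {
  uid: string;
  findChat: () => Promise<ShareChat>;
  defaultResponse: Record<string, unknown>;
  unAuthError: string;
}) {
  const chat = await opts.findChat();

  // Brand-new conversation with no stored record yet: keep the permissive default.
  if (!chat) return { ...opts.defaultResponse, uid: opts.uid };

  // Record exists but belongs to another share-link visitor: hard reject,
  // rather than the old behaviour where the uid filter made it look like "not found".
  if (chat.outLinkUid !== opts.uid) return Promise.reject(opts.unAuthError);

  return { ...opts.defaultResponse, uid: chat.outLinkUid };
}
```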
diff --git a/projects/app/src/web/common/api/request.ts b/projects/app/src/web/common/api/request.ts
index 20bbe2010..8c67f3a5b 100644
--- a/projects/app/src/web/common/api/request.ts
+++ b/projects/app/src/web/common/api/request.ts
@@ -10,6 +10,7 @@ import { TeamErrEnum } from '@fastgpt/global/common/error/code/team';
 import { useSystemStore } from '../system/useSystemStore';
 import { getWebReqUrl } from '@fastgpt/web/common/system/utils';
 import { i18nT } from '@fastgpt/web/i18n/utils';
+import { getNanoid } from '@fastgpt/global/common/string/tools';

 interface ConfigType {
   headers?: { [key: string]: string };
@@ -27,41 +28,48 @@ interface ResponseDataType {

 const maxQuantityMap: Record<
   string,
-  {
-    amount: number;
-    sign: AbortController;
-  }
+  | undefined
+  | {
+      id: string;
+      sign: AbortController;
+    }[]
 > = {};

+/*
+  Every request generates a unique sign
+  If the number of requests exceeds maxQuantity, cancel the earliest request and initiate a new request
+*/
 function checkMaxQuantity({ url, maxQuantity }: { url: string; maxQuantity?: number }) {
-  if (maxQuantity) {
-    const item = maxQuantityMap[url];
-    const controller = new AbortController();
+  if (!maxQuantity) return {};
+  const item = maxQuantityMap[url];
+  const id = getNanoid();
+  const sign = new AbortController();

-    if (item) {
-      if (item.amount >= maxQuantity) {
-        !item.sign?.signal?.aborted && item.sign?.abort?.();
-        maxQuantityMap[url] = {
-          amount: 1,
-          sign: controller
-        };
-      } else {
-        item.amount++;
-      }
-    } else {
-      maxQuantityMap[url] = {
-        amount: 1,
-        sign: controller
-      };
+  if (item && item.length > 0) {
+    if (item.length >= maxQuantity) {
+      const firstSign = item.shift();
+      firstSign?.sign.abort();
     }
-    return controller;
+    item.push({ id, sign });
+  } else {
+    maxQuantityMap[url] = [{ id, sign }];
   }
+  return {
+    id,
+    abortSignal: sign?.signal
+  };
 }
-function requestFinish({ url }: { url: string }) {
+
+function requestFinish({ signId, url }: { signId?: string; url: string }) {
   const item = maxQuantityMap[url];
   if (item) {
-    item.amount--;
-    if (item.amount <= 0) {
+    if (signId) {
+      const index = item.findIndex((item) => item.id === signId);
+      if (index !== -1) {
+        item.splice(index, 1);
+      }
+    }
+    if (item.length <= 0) {
       delete maxQuantityMap[url];
     }
   }
@@ -165,7 +173,7 @@ function request(
     }
   }

-  const controller = checkMaxQuantity({ url, maxQuantity });
+  const { id: signId, abortSignal } = checkMaxQuantity({ url, maxQuantity });

   return instance
     .request({
       url,
       method,
       data: ['POST', 'PUT'].includes(method) ? data : undefined,
       params: !['POST', 'PUT'].includes(method) ? data : undefined,
-      signal: cancelToken?.signal ?? controller?.signal,
+      signal: cancelToken?.signal ?? abortSignal,
       withCredentials,
       ...config // 用户自定义配置,可以覆盖前面的配置
     })
     .then((res) => checkRes(res.data))
     .catch((err) => responseError(err))
-    .finally(() => requestFinish({ url }));
+    .finally(() => requestFinish({ signId, url }));
 }

 /**
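`checkMaxQuantity` above now keeps a per-URL queue of `{ id, sign }` entries instead of a single counter, so when more than `maxQuantity` requests to the same URL are in flight, the oldest one is aborted through its own `AbortController`, and `requestFinish` removes only the finished entry. A self-contained sketch of the same idea using `fetch` (names are mine, not the project's):

```ts
// Minimal illustration of "cancel the earliest in-flight request per URL".
const inflight: Record<string, { id: string; sign: AbortController }[] | undefined> = {};

async function limitedFetch(url: string, maxQuantity = 1): Promise<unknown> {
  const id = Math.random().toString(36).slice(2); // the real code uses getNanoid()
  const sign = new AbortController();

  const queue = (inflight[url] ??= []);
  if (queue.length >= maxQuantity) {
    queue.shift()?.sign.abort(); // abort the earliest request for this URL
  }
  queue.push({ id, sign });

  try {
    const res = await fetch(url, { signal: sign.signal });
    return await res.json();
  } finally {
    // Remove only this request's entry, mirroring requestFinish({ signId, url }).
    const idx = queue.findIndex((item) => item.id === id);
    if (idx !== -1) queue.splice(idx, 1);
    if (queue.length === 0) delete inflight[url];
  }
}
```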
diff --git a/projects/app/src/web/core/app/api.ts b/projects/app/src/web/core/app/api.ts
index d1908a48e..f466c3fd7 100644
--- a/projects/app/src/web/core/app/api.ts
+++ b/projects/app/src/web/core/app/api.ts
@@ -39,7 +39,7 @@ export const putAppById = (id: string, data: AppUpdateParams) =>

 // =================== chat logs
 export const getAppChatLogs = (data: GetAppChatLogsParams) =>
-  POST>(`/core/app/getChatLogs`, data);
+  POST>(`/core/app/getChatLogs`, data, { maxQuantity: 1 });

 export const resumeInheritPer = (appId: string) =>
   GET(`/core/app/resumeInheritPermission`, { appId });
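Registering `getAppChatLogs` with `{ maxQuantity: 1 }` is what lets the Logs page bind its search input straight to `logTitle` without the removed 500 ms debounce: each keystroke refetches, the previous in-flight request is aborted, and the resulting cancellation (`error.code === 'ERR_CANCELED'`) is silently ignored by `usePagination`. A condensed, hypothetical illustration of that flow (not the real component):

```tsx
import React, { useState } from 'react';

// Illustrative stand-in for the title search in Logs/index.tsx.
function LogTitleSearch({ onSearch }: { onSearch: (title: string) => void }) {
  const [logTitle, setLogTitle] = useState('');

  return (
    <input
      value={logTitle}
      onChange={(e) => {
        setLogTitle(e.target.value);
        // Fires on every keystroke; the API layer cancels the previous request
        // because /core/app/getChatLogs is registered with maxQuantity: 1.
        onSearch(e.target.value);
      }}
    />
  );
}

export default LogTitleSearch;
```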