Chat perf test (#2252)

* perf: optimize chat init api (#2233)

* perf: optimize the chat/init api

* perf: add optimizations to the team and share APIs

* perf: improve API implementation style

* perf: improve API implementation style

* perf: polish details

* perf: add auth fields

* perf: more elegant implementation 🥳

* fix: fix the debug tag display bug (#2250)

* fix: fix bug where the debug tag was not displayed

* perf

* perf: optimize code

* fix: return a new object

* fix: show tag error

---------

Co-authored-by: papapatrick <109422393+Patrickill@users.noreply.github.com>
Author: Archer
Date: 2024-08-03 10:44:31 +08:00
Committed by: GitHub
Parent: af1cff6230
Commit: 9f37e56173
16 changed files with 235 additions and 113 deletions

View File

@@ -106,9 +106,16 @@ export type AdminFbkType = {
 };

 /* --------- chat item ---------- */
+export type ResponseTagItemType = {
+  totalRunningTime?: number;
+  totalQuoteList?: SearchDataResponseItemType[];
+  llmModuleAccount?: number;
+  historyPreviewLength?: number;
+};
+
 export type ChatItemType = (UserChatItemType | SystemChatItemType | AIChatItemType) & {
   dataId?: string;
-};
+} & ResponseTagItemType;

 export type ChatSiteItemType = (UserChatItemType | SystemChatItemType | AIChatItemType) & {
   dataId: string;
@@ -116,7 +123,8 @@ export type ChatSiteItemType = (UserChatItemType | SystemChatItemType | AIChatIt
   moduleName?: string;
   ttsBuffer?: Uint8Array;
   responseData?: ChatHistoryItemResType[];
-} & ChatBoxInputType;
+} & ChatBoxInputType &
+  ResponseTagItemType;

 /* --------- team chat --------- */
 export type ChatAppListSchema = {
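Note: ResponseTagItemType holds the pre-computed statistics that the ResponseTags component renders, so the init APIs can strip the heavy responseData array from each history item. A minimal sketch of what an AI history item may look like after that transformation (all concrete values are made up):

// Illustrative only: shape of one AI history item after transformPreviewHistories.
// The four stat fields come from ResponseTagItemType above; the ids are placeholders.
const previewItem = {
  obj: 'AI',
  dataId: 'data-1',
  totalRunningTime: 3.21,   // seconds, summed over every executed node
  totalQuoteList: [],       // merged quoteList of all dataset-search nodes
  llmModuleAccount: 1,      // number of LLM nodes that ran
  historyPreviewLength: 6,  // length of the LLM node's historyPreview
  responseData: undefined   // full node detail is fetched lazily via getResData
};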

View File

@@ -1,4 +1,4 @@
-import React, { useState, useMemo } from 'react';
+import React, { useState, useMemo, useCallback } from 'react';
 import { useAudioPlay } from '@/web/common/utils/voice';
 import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';
 import {
@@ -8,7 +8,7 @@ import {
   ChatInputGuideConfigType,
   VariableItemType
 } from '@fastgpt/global/core/app/type';
-import { ChatSiteItemType } from '@fastgpt/global/core/chat/type';
+import { ChatHistoryItemResType, ChatSiteItemType } from '@fastgpt/global/core/chat/type';
 import {
   defaultChatInputGuideConfig,
   defaultTTSConfig,
@@ -17,14 +17,16 @@ import {
 import { createContext } from 'use-context-selector';
 import { FieldValues, UseFormReturn } from 'react-hook-form';
 import { VariableInputEnum } from '@fastgpt/global/core/workflow/constants';
+import { getChatResData } from '@/web/core/chat/api';

 export type ChatProviderProps = OutLinkChatAuthProps & {
   appAvatar?: string;
+  appId: string;
   chatConfig?: AppChatConfigType;

   chatHistories: ChatSiteItemType[];
   setChatHistories: React.Dispatch<React.SetStateAction<ChatSiteItemType[]>>;
   variablesForm: UseFormReturn<FieldValues, any>;

   // not chat test params
@@ -61,6 +63,7 @@ type useChatStoreType = OutLinkChatAuthProps &
   isChatting: boolean;
   chatInputGuide: ChatInputGuideConfigType;
   outLinkAuthData: OutLinkChatAuthProps;
+  getHistoryResponseData: ({ dataId }: { dataId: string }) => Promise<ChatHistoryItemResType[]>;
 };

 export const ChatBoxContext = createContext<useChatStoreType>({
@@ -181,7 +184,26 @@ const Provider = ({
       chatHistories[chatHistories.length - 1]?.status !== 'finish',
     [chatHistories]
   );
+  const getHistoryResponseData = useCallback(
+    async ({ dataId }: { dataId: string }) => {
+      const aimItem = chatHistories.find((item) => item.dataId === dataId)!;
+      if (!!aimItem?.responseData || !props.chatId) {
+        return aimItem.responseData || [];
+      } else {
+        let resData = await getChatResData({
+          appId: props.appId,
+          chatId: props.chatId,
+          dataId,
+          ...outLinkAuthData
+        });
+        setChatHistories((state) =>
+          state.map((item) => (item.dataId === dataId ? { ...item, responseData: resData } : item))
+        );
+        return resData;
+      }
+    },
+    [chatHistories, outLinkAuthData, props.appId, props.chatId, setChatHistories]
+  );
   const value: useChatStoreType = {
     ...props,
     shareId,
@@ -210,7 +232,8 @@ const Provider = ({
     isChatting,
     chatInputGuide,
     outLinkAuthData,
-    variablesForm
+    variablesForm,
+    getHistoryResponseData
   };

   return <ChatBoxContext.Provider value={value}>{children}</ChatBoxContext.Provider>;
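Note: getHistoryResponseData is the lazy counterpart of the slimmer init payload. It returns the cached responseData when the item already has it (or when there is no chatId), otherwise it fetches the detail once via getChatResData and writes it back into chatHistories. A rough consumer sketch, assuming the caller is a component under ChatBoxContext (the modals below use the same pattern through useRequest2):

// Sketch only: fetch the detailed node responses for one message on demand.
const { getHistoryResponseData } = useContextSelector(ChatBoxContext, (v) => v);

const openDetail = async (dataId: string) => {
  const responseData = await getHistoryResponseData({ dataId }); // hits the API at most once per item
  console.log('node responses for', dataId, responseData);
};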

View File

@@ -1,23 +1,40 @@
 import React from 'react';
-import { ModalBody, Box, useTheme } from '@chakra-ui/react';
+import { ModalBody, Box } from '@chakra-ui/react';
 import MyModal from '@fastgpt/web/components/common/MyModal';
-import { DispatchNodeResponseType } from '@fastgpt/global/core/workflow/runtime/type.d';
+import { useContextSelector } from 'use-context-selector';
+import { ChatBoxContext } from '../Provider';
+import { ChatHistoryItemResType } from '@fastgpt/global/core/chat/type';
+import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
+import { useRequest2 } from '@fastgpt/web/hooks/useRequest';

+const isLLMNode = (item: ChatHistoryItemResType) =>
+  item.moduleType === FlowNodeTypeEnum.chatNode || item.moduleType === FlowNodeTypeEnum.tools;
+
-const ContextModal = ({
-  context = [],
-  onClose
-}: {
-  context: DispatchNodeResponseType['historyPreview'];
-  onClose: () => void;
-}) => {
-  const theme = useTheme();
+const ContextModal = ({ onClose, dataId }: { onClose: () => void; dataId: string }) => {
+  const { getHistoryResponseData } = useContextSelector(ChatBoxContext, (v) => v);
+  const { loading: isLoading, data: contextModalData } = useRequest2(
+    () =>
+      getHistoryResponseData({ dataId }).then((res) => {
+        const flatResData: ChatHistoryItemResType[] =
+          res
+            ?.map((item) => {
+              if (item.pluginDetail || item.toolDetail) {
+                return [item, ...(item.pluginDetail || []), ...(item.toolDetail || [])];
+              }
+              return item;
+            })
+            .flat() || [];
+        return flatResData.find(isLLMNode)?.historyPreview || [];
+      }),
+    { manual: false }
+  );

   return (
     <MyModal
       isOpen={true}
       onClose={onClose}
+      isLoading={isLoading}
       iconSrc="/imgs/modal/chatHistory.svg"
-      title={`上下文预览(${context.length}条)`}
+      title={`上下文预览(${contextModalData?.length || 0}条)`}
       h={['90vh', '80vh']}
       minW={['90vw', '600px']}
       isCentered
@@ -28,12 +45,12 @@ const ContextModal = ({
         wordBreak={'break-all'}
         fontSize={'sm'}
       >
-        {context.map((item, i) => (
+        {contextModalData?.map((item, i) => (
           <Box
             key={i}
             p={2}
             borderRadius={'md'}
-            border={theme.borders.base}
+            border={'base'}
             _notLast={{ mb: 2 }}
             position={'relative'}
           >

View File

@@ -1,37 +1,41 @@
-import React, { useEffect, useMemo, useState } from 'react';
-import { type ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d';
-import { DispatchNodeResponseType } from '@fastgpt/global/core/workflow/runtime/type.d';
-import { Flex, useDisclosure, Box, Collapse } from '@chakra-ui/react';
+import React, { useMemo, useState } from 'react';
+import { Flex, useDisclosure, Box } from '@chakra-ui/react';
 import { useTranslation } from 'next-i18next';
 import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
 import dynamic from 'next/dynamic';
 import MyTag from '@fastgpt/web/components/common/Tag/index';
 import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
-import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
 import { getSourceNameIcon } from '@fastgpt/global/core/dataset/utils';
 import ChatBoxDivider from '@/components/core/chat/Divider';
 import { strIsLink } from '@fastgpt/global/common/string/tools';
 import MyIcon from '@fastgpt/web/components/common/Icon';
 import { useSystem } from '@fastgpt/web/hooks/useSystem';
-import { useSize } from 'ahooks';
+import { ChatSiteItemType } from '@fastgpt/global/core/chat/type';
+import { addStatisticalDataToHistoryItem } from '@/global/core/chat/utils';

 const QuoteModal = dynamic(() => import('./QuoteModal'));
 const ContextModal = dynamic(() => import('./ContextModal'));
 const WholeResponseModal = dynamic(() => import('../../../components/WholeResponseModal'));

-const isLLMNode = (item: ChatHistoryItemResType) =>
-  item.moduleType === FlowNodeTypeEnum.chatNode || item.moduleType === FlowNodeTypeEnum.tools;
-
 const ResponseTags = ({
-  flowResponses = [],
-  showDetail
+  showTags,
+  showDetail,
+  historyItem
 }: {
-  flowResponses?: ChatHistoryItemResType[];
+  showTags: boolean;
   showDetail: boolean;
+  historyItem: ChatSiteItemType;
 }) => {
   const { isPc } = useSystem();
   const { t } = useTranslation();
   const quoteListRef = React.useRef<HTMLDivElement>(null);
+  const dataId = historyItem.dataId;
+  const {
+    totalQuoteList: quoteList = [],
+    llmModuleAccount = 0,
+    totalRunningTime: runningTime = 0,
+    historyPreviewLength = 0
+  } = useMemo(() => addStatisticalDataToHistoryItem(historyItem), [historyItem]);
   const [quoteModalData, setQuoteModalData] = useState<{
     rawSearch: SearchDataResponseItemType[];
     metadata?: {
@@ -41,69 +45,40 @@ const ResponseTags = ({
     };
   }>();
   const [quoteFolded, setQuoteFolded] = useState<boolean>(true);
-  const [contextModalData, setContextModalData] =
-    useState<DispatchNodeResponseType['historyPreview']>();
   const {
     isOpen: isOpenWholeModal,
     onOpen: onOpenWholeModal,
     onClose: onCloseWholeModal
   } = useDisclosure();
-
-  const quoteListSize = useSize(quoteListRef);
+  const {
+    isOpen: isOpenContextModal,
+    onOpen: onOpenContextModal,
+    onClose: onCloseContextModal
+  } = useDisclosure();

   const quoteIsOverflow = quoteListRef.current
     ? quoteListRef.current.scrollHeight > (isPc ? 50 : 55)
     : true;

-  const {
-    llmModuleAccount,
-    quoteList = [],
-    sourceList = [],
-    historyPreview = [],
-    runningTime = 0
-  } = useMemo(() => {
-    const flatResponse = flowResponses
-      .map((item) => {
-        if (item.pluginDetail || item.toolDetail) {
-          return [item, ...(item.pluginDetail || []), ...(item.toolDetail || [])];
-        }
-        return item;
-      })
-      .flat();
-    const chatData = flatResponse.find(isLLMNode);
-    const quoteList = flatResponse
-      .filter((item) => item.moduleType === FlowNodeTypeEnum.datasetSearchNode)
-      .map((item) => item.quoteList)
-      .flat()
-      .filter(Boolean) as SearchDataResponseItemType[];
-    const sourceList = quoteList.reduce(
-      (acc: Record<string, SearchDataResponseItemType[]>, cur) => {
-        if (!acc[cur.collectionId]) {
-          acc[cur.collectionId] = [cur];
-        }
-        return acc;
-      },
-      {}
-    );
-    return {
-      llmModuleAccount: flatResponse.filter(isLLMNode).length,
-      quoteList,
-      sourceList: Object.values(sourceList)
-        .flat()
-        .map((item) => ({
-          sourceName: item.sourceName,
-          sourceId: item.sourceId,
-          icon: getSourceNameIcon({ sourceId: item.sourceId, sourceName: item.sourceName }),
-          canReadQuote: showDetail || strIsLink(item.sourceId),
-          collectionId: item.collectionId
-        })),
-      historyPreview: chatData?.historyPreview,
-      runningTime: +flowResponses.reduce((sum, item) => sum + (item.runningTime || 0), 0).toFixed(2)
-    };
-  }, [showDetail, flowResponses]);
+  const sourceList = useMemo(() => {
+    return Object.values(
+      quoteList.reduce((acc: Record<string, SearchDataResponseItemType[]>, cur) => {
+        if (!acc[cur.collectionId]) {
+          acc[cur.collectionId] = [cur];
+        }
+        return acc;
+      }, {})
+    )
+      .flat()
+      .map((item) => ({
+        sourceName: item.sourceName,
+        sourceId: item.sourceId,
+        icon: getSourceNameIcon({ sourceId: item.sourceId, sourceName: item.sourceName }),
+        canReadQuote: showDetail || strIsLink(item.sourceId),
+        collectionId: item.collectionId
+      }));
+  }, [quoteList, showDetail]);

-  return flowResponses.length === 0 ? null : (
+  return !showTags ? null : (
     <>
       {sourceList.length > 0 && (
         <>
@@ -213,15 +188,15 @@ const ResponseTags = ({
         )}
         {llmModuleAccount === 1 && (
           <>
-            {historyPreview.length > 0 && (
+            {historyPreviewLength > 0 && (
              <MyTooltip label={'点击查看上下文预览'}>
                <MyTag
                  colorSchema="green"
                  cursor={'pointer'}
                  type="borderSolid"
-                 onClick={() => setContextModalData(historyPreview)}
+                 onClick={onOpenContextModal}
                >
-                 {historyPreview.length}
+                 {historyPreviewLength}
                </MyTag>
              </MyTooltip>
            )}
@@ -259,15 +234,9 @@ const ResponseTags = ({
          onClose={() => setQuoteModalData(undefined)}
        />
      )}
-      {!!contextModalData && (
-        <ContextModal context={contextModalData} onClose={() => setContextModalData(undefined)} />
-      )}
+      {isOpenContextModal && <ContextModal dataId={dataId} onClose={onCloseContextModal} />}
      {isOpenWholeModal && (
-        <WholeResponseModal
-          response={flowResponses}
-          showDetail={showDetail}
-          onClose={onCloseWholeModal}
-        />
+        <WholeResponseModal dataId={dataId} showDetail={showDetail} onClose={onCloseWholeModal} />
      )}
    </>
  );

View File

@@ -11,6 +11,7 @@ import React, {
 import Script from 'next/script';
 import type {
   AIChatItemValueItemType,
+  ChatHistoryItemResType,
   ChatSiteItemType,
   UserChatItemValueItemType
 } from '@fastgpt/global/core/chat/type.d';
@@ -543,6 +544,7 @@ const ChatBox = (
     },
     [
       abortRequest,
+      allVariableList,
       chatHistories,
       createQuestionGuide,
       finishSegmentedAudio,
@@ -559,7 +561,6 @@ const ChatBox = (
       startSegmentedAudio,
       t,
       toast,
-      variableList,
       variablesForm
     ]
   );
@@ -874,7 +875,6 @@ const ChatBox = (
       });
     }
   }));

   return (
     <Flex flexDirection={'column'} h={'100%'} position={'relative'}>
       <Script src="/js/html2pdf.bundle.min.js" strategy="lazyOnload"></Script>
@@ -927,8 +927,9 @@ const ChatBox = (
                   })}
                 >
                   <ResponseTags
-                    flowResponses={item.responseData}
+                    showTags={index !== chatHistories.length - 1 || !isChatting}
                     showDetail={!shareId && !teamId}
+                    historyItem={item}
                   />
                   {/* custom feedback */}

View File

@@ -49,7 +49,6 @@ export const useChat = () => {
     ChatBoxRef.current?.restartChat?.();
   }, [variablesForm]);

   return {
     ChatBoxRef,
     chatRecords,

View File

@@ -13,6 +13,9 @@ import QuestionTip from '@fastgpt/web/components/common/MyTooltip/QuestionTip';
 import Avatar from '@fastgpt/web/components/common/Avatar';
 import { useSystem } from '@fastgpt/web/hooks/useSystem';
 import MyIcon from '@fastgpt/web/components/common/Icon';
+import { useContextSelector } from 'use-context-selector';
+import { ChatBoxContext } from '../ChatContainer/ChatBox/Provider';
+import { useRequest2 } from '@fastgpt/web/hooks/useRequest';

 type sideTabItemType = {
   moduleLogo?: string;
@@ -85,33 +88,42 @@ function Row({
 }

 const WholeResponseModal = ({
-  response,
   showDetail,
-  onClose
+  onClose,
+  dataId
 }: {
-  response: ChatHistoryItemResType[];
   showDetail: boolean;
   onClose: () => void;
+  dataId: string;
 }) => {
   const { t } = useTranslation();
+  const { appId, chatId, getHistoryResponseData } = useContextSelector(ChatBoxContext, (v) => v);
+  const { loading: isLoading, data: response } = useRequest2(
+    () => getHistoryResponseData({ dataId }),
+    {
+      manual: false
+    }
+  );

   return (
     <MyModal
       isCentered
       isOpen={true}
       onClose={onClose}
       h={['90vh', '80vh']}
+      isLoading={isLoading}
       maxH={['90vh', '700px']}
       minW={['90vw', '880px']}
       iconSrc="/imgs/modal/wholeRecord.svg"
       title={
         <Flex alignItems={'center'}>
           {t('common:core.chat.response.Complete Response')}
-          <QuestionTip ml={2} label={'从左往右,为各个模块的响应顺序'}></QuestionTip>
+          <QuestionTip ml={2} label={'从上到下,为各个模块的响应顺序'}></QuestionTip>
         </Flex>
       }
     >
-      <ResponseBox response={response} showDetail={showDetail} />
+      {response?.length && <ResponseBox response={response} showDetail={showDetail} />}
     </MyModal>
   );
 };
@@ -135,6 +147,7 @@ export const ResponseBox = React.memo(function ResponseBox({
   const [currentNodeId, setCurrentNodeId] = useState(
     flattedResponse[0]?.nodeId ? flattedResponse[0].nodeId : ''
   );

   const activeModule = useMemo(
     () => flattedResponse.find((item) => item.nodeId === currentNodeId) as ChatHistoryItemResType,
     [currentNodeId, flattedResponse]

View File

@@ -0,0 +1,43 @@
+import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
+import { ChatHistoryItemResType, ChatItemType } from '@fastgpt/global/core/chat/type';
+import { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
+import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
+
+const isLLMNode = (item: ChatHistoryItemResType) =>
+  item.moduleType === FlowNodeTypeEnum.chatNode || item.moduleType === FlowNodeTypeEnum.tools;
+
+export function transformPreviewHistories(histories: ChatItemType[]) {
+  return histories.map((item) => {
+    return {
+      ...addStatisticalDataToHistoryItem(item),
+      responseData: undefined
+    };
+  });
+}
+
+export function addStatisticalDataToHistoryItem(historyItem: ChatItemType) {
+  if (historyItem.obj !== ChatRoleEnum.AI) return historyItem;
+  if (historyItem.totalQuoteList !== undefined) return historyItem;
+
+  const flatResData: ChatHistoryItemResType[] =
+    historyItem.responseData
+      ?.map((item) => {
+        if (item.pluginDetail || item.toolDetail) {
+          return [item, ...(item.pluginDetail || []), ...(item.toolDetail || [])];
+        }
+        return item;
+      })
+      .flat() || [];
+
+  return {
+    ...historyItem,
+    llmModuleAccount: flatResData.filter(isLLMNode).length,
+    totalQuoteList: flatResData
+      .filter((item) => item.moduleType === FlowNodeTypeEnum.datasetSearchNode)
+      .map((item) => item.quoteList)
+      .flat()
+      .filter(Boolean) as SearchDataResponseItemType[],
+    totalRunningTime: Number(
+      flatResData.reduce((sum, item) => sum + (item.runningTime || 0), 0).toFixed(2)
+    ),
+    historyPreviewLength: flatResData.find(isLLMNode)?.historyPreview?.length
+  };
+}
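Note: addStatisticalDataToHistoryItem returns the item untouched for non-AI messages and for items that already carry totalQuoteList, so it is safe to call on both full and pre-transformed histories. A rough usage sketch (histories and historyItem are placeholders for the surrounding state):

// Server side (chat init): keep only the derived stats, drop the heavy detail.
const previewHistories = transformPreviewHistories(histories);

// Client side (ResponseTags): read the same stats regardless of which shape arrived.
const {
  totalQuoteList = [],
  llmModuleAccount = 0,
  totalRunningTime = 0,
  historyPreviewLength = 0
} = addStatisticalDataToHistoryItem(historyItem);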

View File

@@ -0,0 +1,46 @@
+import { authChatCrud } from '@/service/support/permission/auth/chat';
+import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
+import { MongoChatItem } from '@fastgpt/service/core/chat/chatItemSchema';
+import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
+import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
+import { NextAPI } from '@/service/middleware/entry';
+import { ChatHistoryItemResType } from '@fastgpt/global/core/chat/type';
+import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';
+
+export type getResDataQuery = OutLinkChatAuthProps & {
+  chatId?: string;
+  dataId: string;
+  appId: string;
+};
+
+export type getResDataBody = {};
+
+export type getResDataResponse = ChatHistoryItemResType[] | {};
+
+async function handler(
+  req: ApiRequestProps<getResDataBody, getResDataQuery>,
+  res: ApiResponseType<any>
+): Promise<getResDataResponse> {
+  const { appId, chatId, dataId } = req.query;
+  if (!appId || !chatId || !dataId) {
+    return {};
+  }
+
+  await authChatCrud({
+    req,
+    authToken: true,
+    ...req.query,
+    per: ReadPermissionVal
+  });
+
+  const chatData = await MongoChatItem.findOne({
+    appId,
+    chatId,
+    dataId
+  });
+
+  if (chatData?.obj === ChatRoleEnum.AI) {
+    return chatData.responseData || {};
+  } else return {};
+}
+
+export default NextAPI(handler);
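Note: the new endpoint authorizes the caller through authChatCrud (account token or the outlink auth fields carried in the query) and only returns responseData for AI items; missing ids or non-AI items fall back to an empty object instead of an error. A hedged sketch of the request shape, using the exported getResDataQuery type (ids are placeholders; optional OutLinkChatAuthProps fields can ride along for share/team chats):

// Sketch only: what a caller passes; the response is ChatHistoryItemResType[] for an AI item, otherwise {}.
const exampleQuery: getResDataQuery = {
  appId: 'app-id',   // placeholder
  chatId: 'chat-id', // placeholder
  dataId: 'data-id'  // placeholder
};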

View File

@@ -12,7 +12,8 @@ import { getAppLatestVersion } from '@fastgpt/service/core/app/controller';
 import { NextAPI } from '@/service/middleware/entry';
 import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
 import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
+import { transformPreviewHistories } from '@/global/core/chat/utils';
+import { AppTypeEnum } from '@fastgpt/global/core/app/constants';

 async function handler(
   req: NextApiRequest,
   res: NextApiResponse
@@ -63,7 +64,7 @@ async function handler(
     title: chat?.title || '新对话',
     userAvatar: undefined,
     variables: chat?.variables || {},
-    history: histories,
+    history: app.type === AppTypeEnum.plugin ? histories : transformPreviewHistories(histories),
     app: {
       chatConfig: getAppChatConfig({
         chatConfig: app.chatConfig,
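Note: plugin-type apps keep the full histories (including responseData), while chat apps now ship only the derived preview stats. The expanded form of the changed line is simply:

// Equivalent expanded form of the change above (sketch):
const history =
  app.type === AppTypeEnum.plugin
    ? histories                              // plugin runs keep their full responseData
    : transformPreviewHistories(histories);  // chat apps get stats only; detail comes from getResData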

View File

@@ -17,7 +17,7 @@ import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
 import { getAppLatestVersion } from '@fastgpt/service/core/app/controller';
 import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
 import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
+import { transformPreviewHistories } from '@/global/core/chat/utils';

 export default async function handler(req: NextApiRequest, res: NextApiResponse) {
   try {
     await connectToDatabase();
@@ -73,7 +73,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
       //@ts-ignore
       userAvatar: tmb?.userId?.avatar,
       variables: chat?.variables || {},
-      history: histories,
+      history: app.type === AppTypeEnum.plugin ? histories : transformPreviewHistories(histories),
       app: {
         chatConfig: getAppChatConfig({
           chatConfig: app.chatConfig,

View File

@@ -17,6 +17,7 @@ import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
 import { getAppLatestVersion } from '@fastgpt/service/core/app/controller';
 import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
 import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
+import { transformPreviewHistories } from '@/global/core/chat/utils';

 export default async function handler(req: NextApiRequest, res: NextApiResponse) {
   try {
@@ -74,7 +75,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
       title: chat?.title || '新对话',
       userAvatar: team?.avatar,
       variables: chat?.variables || {},
-      history: histories,
+      history: app.type === AppTypeEnum.plugin ? histories : transformPreviewHistories(histories),
       app: {
         chatConfig: getAppChatConfig({
           chatConfig: app.chatConfig,

View File

@@ -3,7 +3,6 @@ import NextHead from '@/components/common/NextHead';
 import { useRouter } from 'next/router';
 import { delChatRecordById, getChatHistories, getInitChatInfo } from '@/web/core/chat/api';
 import { Box, Flex, Drawer, DrawerOverlay, DrawerContent, useTheme } from '@chakra-ui/react';
-import { useSystemStore } from '@/web/common/system/useSystemStore';
 import { streamFetch } from '@/web/common/api/fetch';
 import { useChatStore } from '@/web/core/chat/context/storeChat';
 import { useToast } from '@fastgpt/web/hooks/useToast';

View File

@@ -1,7 +1,6 @@
 import React, { useCallback, useRef, useState } from 'react';
 import { useRouter } from 'next/router';
 import { Box, Flex, Drawer, DrawerOverlay, DrawerContent } from '@chakra-ui/react';
-import { useSystemStore } from '@/web/common/system/useSystemStore';
 import { streamFetch } from '@/web/common/api/fetch';
 import { useShareChatStore } from '@/web/core/chat/storeShareChat';
 import SideBar from '@/components/SideBar';

View File

@@ -4,7 +4,6 @@ import { delChatRecordById, getChatHistories, getTeamChatInfo } from '@/web/core
 import { useRouter } from 'next/router';
 import { Box, Flex, Drawer, DrawerOverlay, DrawerContent, useTheme } from '@chakra-ui/react';
 import { useToast } from '@fastgpt/web/hooks/useToast';
-import { useSystemStore } from '@/web/common/system/useSystemStore';
 import SideBar from '@/components/SideBar';
 import PageContainer from '@/components/PageContainer';
 import { getMyTokensApps } from '@/web/core/chat/api';

View File

@@ -1,6 +1,6 @@
 import { GET, POST, DELETE, PUT } from '@/web/common/api/request';
-import type { ChatHistoryItemType, ChatAppListSchema } from '@fastgpt/global/core/chat/type.d';
+import type { ChatHistoryItemType, ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d';
+import { getResDataQuery } from '@/pages/api/core/chat/getResData';
 import type {
   CloseCustomFeedbackParams,
   InitChatProps,
@@ -9,6 +9,7 @@ import type {
   GetHistoriesProps,
   InitTeamChatProps
 } from '@/global/core/chat/api.d';
 import type {
   AdminUpdateFeedbackParams,
   ClearHistoriesProps,
@@ -29,13 +30,16 @@ export const getInitOutLinkChatInfo = (data: InitOutLinkChatProps) =>
   GET<InitChatResponse>(`/core/chat/outLink/init`, data);
 export const getTeamChatInfo = (data: InitTeamChatProps) =>
   GET<InitChatResponse>(`/core/chat/team/init`, data);

 /**
  * get current window history(appid or shareId)
  */
 export const getChatHistories = (data: GetHistoriesProps) =>
   POST<ChatHistoryItemType[]>('/core/chat/getHistories', data);

+/**
+ * get detail responseData by dataId appId chatId
+ */
+export const getChatResData = (data: getResDataQuery) =>
+  GET<ChatHistoryItemResType[]>(`/core/chat/getResData`, data);
+
 /**
  * delete one history
  */
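Note: with these two helpers the optimized flow on the client is: getInitChatInfo returns histories whose responseData has already been stripped (except for plugin apps), and getChatResData backfills the detail for a single message only when a tag or modal asks for it. A compressed sketch of that flow, assuming appId / chatId / dataId / outLinkAuthData come from the surrounding page state:

// 1. Init: histories arrive with the derived stats only (no responseData) for chat apps.
const initData = await getInitChatInfo({ appId, chatId });

// 2. On demand: pull the full node responses for one AI message.
const resData = await getChatResData({ appId, chatId, dataId, ...outLinkAuthData });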