Mirror of https://github.com/labring/FastGPT.git, synced 2025-07-23 05:12:39 +00:00
Chat perf test (#2252)
* perf: optimize chat init api (#2233)
* perf: optimize the chat/init api
* perf: add optimizations for the team and share chat APIs
* perf: refine API code style
* perf: refine API code style
* perf: polish details
* perf: add auth field
* perf: a more elegant implementation 🥳
* fix: Fix the bug in debugging Tag (#2250)
* fix: fix the bug where the debug tag was not displayed
* perf
* perf: optimize code
* fix: return a new object
* fix: show tag error
---------
Co-authored-by: papapatrick <109422393+Patrickill@users.noreply.github.com>
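In outline, the optimization keeps only aggregate tag data (quote count, running time, history-preview length) in the chat/init payload and loads each message's full responseData lazily, the first time a user opens the response or context detail. The sketch below illustrates that fetch-and-cache step using the getChatResData client helper added in this PR; the wrapper function and its parameter names are illustrative, the real logic lives in the ChatBox Provider's getHistoryResponseData callback shown in the diff further down.

import { getChatResData } from '@/web/core/chat/api';
import type { ChatHistoryItemResType, ChatSiteItemType } from '@fastgpt/global/core/chat/type';

// Illustrative cache-on-demand fetch (a sketch, not the shipped component code).
export async function loadResponseData({
  appId,
  chatId,
  dataId,
  histories,
  setHistories
}: {
  appId: string;
  chatId?: string;
  dataId: string;
  histories: ChatSiteItemType[];
  setHistories: (updater: (state: ChatSiteItemType[]) => ChatSiteItemType[]) => void;
}): Promise<ChatHistoryItemResType[]> {
  const item = histories.find((h) => h.dataId === dataId);
  // Already cached (or no chatId to query): reuse what we have.
  if (item?.responseData || !chatId) return item?.responseData || [];

  // Otherwise fetch the detailed node responses for this single message...
  const resData = await getChatResData({ appId, chatId, dataId });
  // ...and write them back into the history state so the next open is instant.
  setHistories((state) =>
    state.map((h) => (h.dataId === dataId ? { ...h, responseData: resData } : h))
  );
  return resData;
}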
packages/global/core/chat/type.d.ts (vendored, 12 lines changed)
@@ -106,9 +106,16 @@ export type AdminFbkType = {
 };

 /* --------- chat item ---------- */
+export type ResponseTagItemType = {
+  totalRunningTime?: number;
+  totalQuoteList?: SearchDataResponseItemType[];
+  llmModuleAccount?: number;
+  historyPreviewLength?: number;
+};
+
 export type ChatItemType = (UserChatItemType | SystemChatItemType | AIChatItemType) & {
   dataId?: string;
-};
+} & ResponseTagItemType;

 export type ChatSiteItemType = (UserChatItemType | SystemChatItemType | AIChatItemType) & {
   dataId: string;
@@ -116,7 +123,8 @@ export type ChatSiteItemType = (UserChatItemType | SystemChatItemType | AIChatIt
   moduleName?: string;
   ttsBuffer?: Uint8Array;
   responseData?: ChatHistoryItemResType[];
-} & ChatBoxInputType;
+} & ChatBoxInputType &
+  ResponseTagItemType;

 /* --------- team chat --------- */
 export type ChatAppListSchema = {
@@ -1,4 +1,4 @@
-import React, { useState, useMemo } from 'react';
+import React, { useState, useMemo, useCallback } from 'react';
 import { useAudioPlay } from '@/web/common/utils/voice';
 import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';
 import {
@@ -8,7 +8,7 @@ import {
   ChatInputGuideConfigType,
   VariableItemType
 } from '@fastgpt/global/core/app/type';
-import { ChatSiteItemType } from '@fastgpt/global/core/chat/type';
+import { ChatHistoryItemResType, ChatSiteItemType } from '@fastgpt/global/core/chat/type';
 import {
   defaultChatInputGuideConfig,
   defaultTTSConfig,
@@ -17,14 +17,16 @@ import {
 import { createContext } from 'use-context-selector';
 import { FieldValues, UseFormReturn } from 'react-hook-form';
 import { VariableInputEnum } from '@fastgpt/global/core/workflow/constants';
+import { getChatResData } from '@/web/core/chat/api';

 export type ChatProviderProps = OutLinkChatAuthProps & {
   appAvatar?: string;

   appId: string;
   chatConfig?: AppChatConfigType;

   chatHistories: ChatSiteItemType[];
   setChatHistories: React.Dispatch<React.SetStateAction<ChatSiteItemType[]>>;

   variablesForm: UseFormReturn<FieldValues, any>;

   // not chat test params
@@ -61,6 +63,7 @@ type useChatStoreType = OutLinkChatAuthProps &
   isChatting: boolean;
   chatInputGuide: ChatInputGuideConfigType;
   outLinkAuthData: OutLinkChatAuthProps;
+  getHistoryResponseData: ({ dataId }: { dataId: string }) => Promise<ChatHistoryItemResType[]>;
 };

 export const ChatBoxContext = createContext<useChatStoreType>({
@@ -181,7 +184,26 @@ const Provider = ({
       chatHistories[chatHistories.length - 1]?.status !== 'finish',
     [chatHistories]
   );

+  const getHistoryResponseData = useCallback(
+    async ({ dataId }: { dataId: string }) => {
+      const aimItem = chatHistories.find((item) => item.dataId === dataId)!;
+      if (!!aimItem?.responseData || !props.chatId) {
+        return aimItem.responseData || [];
+      } else {
+        let resData = await getChatResData({
+          appId: props.appId,
+          chatId: props.chatId,
+          dataId,
+          ...outLinkAuthData
+        });
+        setChatHistories((state) =>
+          state.map((item) => (item.dataId === dataId ? { ...item, responseData: resData } : item))
+        );
+        return resData;
+      }
+    },
+    [chatHistories, outLinkAuthData, props.appId, props.chatId, setChatHistories]
+  );
   const value: useChatStoreType = {
     ...props,
     shareId,
@@ -210,7 +232,8 @@ const Provider = ({
     isChatting,
     chatInputGuide,
     outLinkAuthData,
-    variablesForm
+    variablesForm,
+    getHistoryResponseData
   };

   return <ChatBoxContext.Provider value={value}>{children}</ChatBoxContext.Provider>;
@@ -1,23 +1,40 @@
 import React from 'react';
-import { ModalBody, Box, useTheme } from '@chakra-ui/react';
+import { ModalBody, Box } from '@chakra-ui/react';
 import MyModal from '@fastgpt/web/components/common/MyModal';
 import { DispatchNodeResponseType } from '@fastgpt/global/core/workflow/runtime/type.d';
+import { useContextSelector } from 'use-context-selector';
+import { ChatBoxContext } from '../Provider';
+import { ChatHistoryItemResType } from '@fastgpt/global/core/chat/type';
+import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
+import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
+const isLLMNode = (item: ChatHistoryItemResType) =>
+  item.moduleType === FlowNodeTypeEnum.chatNode || item.moduleType === FlowNodeTypeEnum.tools;

-const ContextModal = ({
-  context = [],
-  onClose
-}: {
-  context: DispatchNodeResponseType['historyPreview'];
-  onClose: () => void;
-}) => {
-  const theme = useTheme();
+const ContextModal = ({ onClose, dataId }: { onClose: () => void; dataId: string }) => {
+  const { getHistoryResponseData } = useContextSelector(ChatBoxContext, (v) => v);
+
+  const { loading: isLoading, data: contextModalData } = useRequest2(
+    () =>
+      getHistoryResponseData({ dataId }).then((res) => {
+        const flatResData: ChatHistoryItemResType[] =
+          res
+            ?.map((item) => {
+              if (item.pluginDetail || item.toolDetail) {
+                return [item, ...(item.pluginDetail || []), ...(item.toolDetail || [])];
+              }
+              return item;
+            })
+            .flat() || [];
+        return flatResData.find(isLLMNode)?.historyPreview || [];
+      }),
+    { manual: false }
+  );
   return (
     <MyModal
       isOpen={true}
       onClose={onClose}
+      isLoading={isLoading}
       iconSrc="/imgs/modal/chatHistory.svg"
-      title={`上下文预览(${context.length}条)`}
+      title={`上下文预览(${contextModalData?.length || 0}条)`}
       h={['90vh', '80vh']}
       minW={['90vw', '600px']}
       isCentered
@@ -28,12 +45,12 @@ const ContextModal = ({
         wordBreak={'break-all'}
         fontSize={'sm'}
       >
-        {context.map((item, i) => (
+        {contextModalData?.map((item, i) => (
           <Box
             key={i}
             p={2}
             borderRadius={'md'}
-            border={theme.borders.base}
+            border={'base'}
            _notLast={{ mb: 2 }}
             position={'relative'}
           >
@@ -1,37 +1,41 @@
-import React, { useEffect, useMemo, useState } from 'react';
-import { type ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d';
-import { DispatchNodeResponseType } from '@fastgpt/global/core/workflow/runtime/type.d';
-import { Flex, useDisclosure, Box, Collapse } from '@chakra-ui/react';
+import React, { useMemo, useState } from 'react';
+import { Flex, useDisclosure, Box } from '@chakra-ui/react';
 import { useTranslation } from 'next-i18next';
 import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
 import dynamic from 'next/dynamic';
 import MyTag from '@fastgpt/web/components/common/Tag/index';
 import MyTooltip from '@fastgpt/web/components/common/MyTooltip';
-import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
 import { getSourceNameIcon } from '@fastgpt/global/core/dataset/utils';
 import ChatBoxDivider from '@/components/core/chat/Divider';
 import { strIsLink } from '@fastgpt/global/common/string/tools';
 import MyIcon from '@fastgpt/web/components/common/Icon';
 import { useSystem } from '@fastgpt/web/hooks/useSystem';
 import { useSize } from 'ahooks';
+import { ChatSiteItemType } from '@fastgpt/global/core/chat/type';
+import { addStatisticalDataToHistoryItem } from '@/global/core/chat/utils';

 const QuoteModal = dynamic(() => import('./QuoteModal'));
 const ContextModal = dynamic(() => import('./ContextModal'));
 const WholeResponseModal = dynamic(() => import('../../../components/WholeResponseModal'));
-
-const isLLMNode = (item: ChatHistoryItemResType) =>
-  item.moduleType === FlowNodeTypeEnum.chatNode || item.moduleType === FlowNodeTypeEnum.tools;

 const ResponseTags = ({
-  flowResponses = [],
-  showDetail
+  showTags,
+  showDetail,
+  historyItem
 }: {
-  flowResponses?: ChatHistoryItemResType[];
+  showTags: boolean;
   showDetail: boolean;
+  historyItem: ChatSiteItemType;
 }) => {
   const { isPc } = useSystem();
   const { t } = useTranslation();
   const quoteListRef = React.useRef<HTMLDivElement>(null);
+  const dataId = historyItem.dataId;
+  const {
+    totalQuoteList: quoteList = [],
+    llmModuleAccount = 0,
+    totalRunningTime: runningTime = 0,
+    historyPreviewLength = 0
+  } = useMemo(() => addStatisticalDataToHistoryItem(historyItem), [historyItem]);
   const [quoteModalData, setQuoteModalData] = useState<{
     rawSearch: SearchDataResponseItemType[];
     metadata?: {
@@ -41,69 +45,40 @@ const ResponseTags = ({
     };
   }>();
   const [quoteFolded, setQuoteFolded] = useState<boolean>(true);
-  const [contextModalData, setContextModalData] =
-    useState<DispatchNodeResponseType['historyPreview']>();
   const {
     isOpen: isOpenWholeModal,
     onOpen: onOpenWholeModal,
     onClose: onCloseWholeModal
   } = useDisclosure();

   const quoteListSize = useSize(quoteListRef);
+  const {
+    isOpen: isOpenContextModal,
+    onOpen: onOpenContextModal,
+    onClose: onCloseContextModal
+  } = useDisclosure();
   const quoteIsOverflow = quoteListRef.current
     ? quoteListRef.current.scrollHeight > (isPc ? 50 : 55)
     : true;

-  const {
-    llmModuleAccount,
-    quoteList = [],
-    sourceList = [],
-    historyPreview = [],
-    runningTime = 0
-  } = useMemo(() => {
-    const flatResponse = flowResponses
-      .map((item) => {
-        if (item.pluginDetail || item.toolDetail) {
-          return [item, ...(item.pluginDetail || []), ...(item.toolDetail || [])];
-        }
-        return item;
-      })
-      .flat();
-
-    const chatData = flatResponse.find(isLLMNode);
-    const quoteList = flatResponse
-      .filter((item) => item.moduleType === FlowNodeTypeEnum.datasetSearchNode)
-      .map((item) => item.quoteList)
-      .flat()
-      .filter(Boolean) as SearchDataResponseItemType[];
-
-    const sourceList = quoteList.reduce(
-      (acc: Record<string, SearchDataResponseItemType[]>, cur) => {
+  const sourceList = useMemo(() => {
+    return Object.values(
+      quoteList.reduce((acc: Record<string, SearchDataResponseItemType[]>, cur) => {
        if (!acc[cur.collectionId]) {
          acc[cur.collectionId] = [cur];
        }
        return acc;
-      },
-      {}
-    );
-    return {
-      llmModuleAccount: flatResponse.filter(isLLMNode).length,
-      quoteList,
-      sourceList: Object.values(sourceList)
-        .flat()
-        .map((item) => ({
-          sourceName: item.sourceName,
-          sourceId: item.sourceId,
-          icon: getSourceNameIcon({ sourceId: item.sourceId, sourceName: item.sourceName }),
-          canReadQuote: showDetail || strIsLink(item.sourceId),
-          collectionId: item.collectionId
-        })),
-      historyPreview: chatData?.historyPreview,
-      runningTime: +flowResponses.reduce((sum, item) => sum + (item.runningTime || 0), 0).toFixed(2)
-    };
-  }, [showDetail, flowResponses]);
+      }, {})
+    )
+      .flat()
+      .map((item) => ({
+        sourceName: item.sourceName,
+        sourceId: item.sourceId,
+        icon: getSourceNameIcon({ sourceId: item.sourceId, sourceName: item.sourceName }),
+        canReadQuote: showDetail || strIsLink(item.sourceId),
+        collectionId: item.collectionId
+      }));
+  }, [quoteList, showDetail]);

-  return flowResponses.length === 0 ? null : (
+  return !showTags ? null : (
     <>
       {sourceList.length > 0 && (
         <>
@@ -213,15 +188,15 @@ const ResponseTags = ({
           )}
           {llmModuleAccount === 1 && (
             <>
-              {historyPreview.length > 0 && (
+              {historyPreviewLength > 0 && (
                 <MyTooltip label={'点击查看上下文预览'}>
                   <MyTag
                     colorSchema="green"
                     cursor={'pointer'}
                     type="borderSolid"
-                    onClick={() => setContextModalData(historyPreview)}
+                    onClick={onOpenContextModal}
                   >
-                    {historyPreview.length}条上下文
+                    {historyPreviewLength}条上下文
                   </MyTag>
                 </MyTooltip>
               )}
@@ -259,15 +234,9 @@ const ResponseTags = ({
           onClose={() => setQuoteModalData(undefined)}
         />
       )}
-      {!!contextModalData && (
-        <ContextModal context={contextModalData} onClose={() => setContextModalData(undefined)} />
-      )}
+      {isOpenContextModal && <ContextModal dataId={dataId} onClose={onCloseContextModal} />}
       {isOpenWholeModal && (
-        <WholeResponseModal
-          response={flowResponses}
-          showDetail={showDetail}
-          onClose={onCloseWholeModal}
-        />
+        <WholeResponseModal dataId={dataId} showDetail={showDetail} onClose={onCloseWholeModal} />
       )}
     </>
   );
@@ -11,6 +11,7 @@ import React, {
 import Script from 'next/script';
 import type {
   AIChatItemValueItemType,
+  ChatHistoryItemResType,
   ChatSiteItemType,
   UserChatItemValueItemType
 } from '@fastgpt/global/core/chat/type.d';
@@ -543,6 +544,7 @@ const ChatBox = (
     },
     [
       abortRequest,
       allVariableList,
       chatHistories,
       createQuestionGuide,
       finishSegmentedAudio,
@@ -559,7 +561,6 @@ const ChatBox = (
       startSegmentedAudio,
       t,
       toast,
       variableList,
       variablesForm
     ]
   );
@@ -874,7 +875,6 @@ const ChatBox = (
         });
       }
     }));

   return (
     <Flex flexDirection={'column'} h={'100%'} position={'relative'}>
       <Script src="/js/html2pdf.bundle.min.js" strategy="lazyOnload"></Script>
@@ -927,8 +927,9 @@ const ChatBox = (
                 })}
               >
                 <ResponseTags
-                  flowResponses={item.responseData}
+                  showTags={index !== chatHistories.length - 1 || !isChatting}
                   showDetail={!shareId && !teamId}
+                  historyItem={item}
                 />

                 {/* custom feedback */}
@@ -49,7 +49,6 @@ export const useChat = () => {

     ChatBoxRef.current?.restartChat?.();
   }, [variablesForm]);

   return {
     ChatBoxRef,
     chatRecords,
@@ -13,6 +13,9 @@ import QuestionTip from '@fastgpt/web/components/common/MyTooltip/QuestionTip';
 import Avatar from '@fastgpt/web/components/common/Avatar';
 import { useSystem } from '@fastgpt/web/hooks/useSystem';
 import MyIcon from '@fastgpt/web/components/common/Icon';
+import { useContextSelector } from 'use-context-selector';
+import { ChatBoxContext } from '../ChatContainer/ChatBox/Provider';
+import { useRequest2 } from '@fastgpt/web/hooks/useRequest';

 type sideTabItemType = {
   moduleLogo?: string;
@@ -85,33 +88,42 @@ function Row({
 }

 const WholeResponseModal = ({
-  response,
   showDetail,
-  onClose
+  onClose,
+  dataId
 }: {
-  response: ChatHistoryItemResType[];
   showDetail: boolean;
   onClose: () => void;
+  dataId: string;
 }) => {
   const { t } = useTranslation();

+  const { appId, chatId, getHistoryResponseData } = useContextSelector(ChatBoxContext, (v) => v);
+  const { loading: isLoading, data: response } = useRequest2(
+    () => getHistoryResponseData({ dataId }),
+    {
+      manual: false
+    }
+  );
+
   return (
     <MyModal
       isCentered
       isOpen={true}
       onClose={onClose}
       h={['90vh', '80vh']}
+      isLoading={isLoading}
       maxH={['90vh', '700px']}
       minW={['90vw', '880px']}
       iconSrc="/imgs/modal/wholeRecord.svg"
       title={
         <Flex alignItems={'center'}>
           {t('common:core.chat.response.Complete Response')}
-          <QuestionTip ml={2} label={'从左往右,为各个模块的响应顺序'}></QuestionTip>
+          <QuestionTip ml={2} label={'从上到下,为各个模块的响应顺序'}></QuestionTip>
         </Flex>
       }
     >
-      <ResponseBox response={response} showDetail={showDetail} />
+      {response?.length && <ResponseBox response={response} showDetail={showDetail} />}
     </MyModal>
   );
 };
@@ -135,6 +147,7 @@ export const ResponseBox = React.memo(function ResponseBox({
   const [currentNodeId, setCurrentNodeId] = useState(
     flattedResponse[0]?.nodeId ? flattedResponse[0].nodeId : ''
   );

   const activeModule = useMemo(
     () => flattedResponse.find((item) => item.nodeId === currentNodeId) as ChatHistoryItemResType,
     [currentNodeId, flattedResponse]
projects/app/src/global/core/chat/utils.ts (new file, 43 lines)
@@ -0,0 +1,43 @@
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { ChatHistoryItemResType, ChatItemType } from '@fastgpt/global/core/chat/type';
import { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';

const isLLMNode = (item: ChatHistoryItemResType) =>
  item.moduleType === FlowNodeTypeEnum.chatNode || item.moduleType === FlowNodeTypeEnum.tools;

export function transformPreviewHistories(histories: ChatItemType[]) {
  return histories.map((item) => {
    return {
      ...addStatisticalDataToHistoryItem(item),
      responseData: undefined
    };
  });
}

export function addStatisticalDataToHistoryItem(historyItem: ChatItemType) {
  if (historyItem.obj !== ChatRoleEnum.AI) return historyItem;
  if (historyItem.totalQuoteList !== undefined) return historyItem;
  const flatResData: ChatHistoryItemResType[] =
    historyItem.responseData
      ?.map((item) => {
        if (item.pluginDetail || item.toolDetail) {
          return [item, ...(item.pluginDetail || []), ...(item.toolDetail || [])];
        }
        return item;
      })
      .flat() || [];
  return {
    ...historyItem,
    llmModuleAccount: flatResData.filter(isLLMNode).length,
    totalQuoteList: flatResData
      .filter((item) => item.moduleType === FlowNodeTypeEnum.datasetSearchNode)
      .map((item) => item.quoteList)
      .flat()
      .filter(Boolean) as SearchDataResponseItemType[],
    totalRunningTime: Number(
      flatResData.reduce((sum, item) => sum + (item.runningTime || 0), 0).toFixed(2)
    ),
    historyPreviewLength: flatResData.find(isLLMNode)?.historyPreview?.length
  };
}
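A small usage sketch of these two helpers: addStatisticalDataToHistoryItem attaches the aggregate tag fields to an AI history item while leaving responseData untouched, and transformPreviewHistories additionally strips responseData before the init payload is returned to the client. The histories value below is a hypothetical placeholder.

import {
  transformPreviewHistories,
  addStatisticalDataToHistoryItem
} from '@/global/core/chat/utils';
import type { ChatItemType } from '@fastgpt/global/core/chat/type';

// Hypothetical history list, e.g. as loaded by the chat/init handlers further down.
declare const histories: ChatItemType[];

// Single AI item: aggregate tag fields are attached, responseData stays as-is.
const tagged = addStatisticalDataToHistoryItem(histories[0]);
// tagged.totalRunningTime, tagged.totalQuoteList, tagged.llmModuleAccount and
// tagged.historyPreviewLength are now available for ResponseTags.

// Init response: same aggregation, but the heavy responseData is dropped,
// which is what shrinks the chat/init payload.
const preview = transformPreviewHistories(histories);
// preview[i].responseData === undefined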
projects/app/src/pages/api/core/chat/getResData.ts (new file, 46 lines)
@@ -0,0 +1,46 @@
import { authChatCrud } from '@/service/support/permission/auth/chat';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { MongoChatItem } from '@fastgpt/service/core/chat/chatItemSchema';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
import { NextAPI } from '@/service/middleware/entry';
import { ChatHistoryItemResType } from '@fastgpt/global/core/chat/type';
import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';

export type getResDataQuery = OutLinkChatAuthProps & {
  chatId?: string;
  dataId: string;
  appId: string;
};

export type getResDataBody = {};

export type getResDataResponse = ChatHistoryItemResType[] | {};

async function handler(
  req: ApiRequestProps<getResDataBody, getResDataQuery>,
  res: ApiResponseType<any>
): Promise<getResDataResponse> {
  const { appId, chatId, dataId } = req.query;
  if (!appId || !chatId || !dataId) {
    return {};
  }
  await authChatCrud({
    req,
    authToken: true,
    ...req.query,
    per: ReadPermissionVal
  });

  const chatData = await MongoChatItem.findOne({
    appId,
    chatId,
    dataId
  });

  if (chatData?.obj === ChatRoleEnum.AI) {
    return chatData.responseData || {};
  } else return {};
}

export default NextAPI(handler);
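For illustration only, the endpoint can also be exercised directly over HTTP; inside the app the getChatResData wrapper defined later in this diff is the intended caller. The /api prefix follows standard Next.js pages/api routing, and the response envelope noted in the comments is an assumption about the jsonRes wrapper rather than something shown in this diff. The handler above returns {} rather than an error when the record is missing or is not an AI message.

import type { ChatHistoryItemResType } from '@fastgpt/global/core/chat/type';

// Sketch of a raw request to the new route (parameter values are placeholders).
async function fetchResData(appId: string, chatId: string, dataId: string) {
  const params = new URLSearchParams({ appId, chatId, dataId });
  const res = await fetch(`/api/core/chat/getResData?${params.toString()}`, {
    credentials: 'include' // authChatCrud({ authToken: true }) relies on the login cookie
  });
  if (!res.ok) throw new Error(`getResData failed: ${res.status}`);
  const body = await res.json(); // assumed envelope: { code, data, ... }
  return (body.data || []) as ChatHistoryItemResType[];
}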
@@ -12,7 +12,8 @@ import { getAppLatestVersion } from '@fastgpt/service/core/app/controller';
 import { NextAPI } from '@/service/middleware/entry';
 import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
 import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
-
+import { transformPreviewHistories } from '@/global/core/chat/utils';
+import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
 async function handler(
   req: NextApiRequest,
   res: NextApiResponse
@@ -63,7 +64,7 @@ async function handler(
     title: chat?.title || '新对话',
     userAvatar: undefined,
     variables: chat?.variables || {},
-    history: histories,
+    history: app.type === AppTypeEnum.plugin ? histories : transformPreviewHistories(histories),
     app: {
       chatConfig: getAppChatConfig({
         chatConfig: app.chatConfig,
@@ -17,7 +17,7 @@ import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
 import { getAppLatestVersion } from '@fastgpt/service/core/app/controller';
 import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
 import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
-
+import { transformPreviewHistories } from '@/global/core/chat/utils';
 export default async function handler(req: NextApiRequest, res: NextApiResponse) {
   try {
     await connectToDatabase();
@@ -73,7 +73,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
       //@ts-ignore
       userAvatar: tmb?.userId?.avatar,
       variables: chat?.variables || {},
-      history: histories,
+      history: app.type === AppTypeEnum.plugin ? histories : transformPreviewHistories(histories),
       app: {
         chatConfig: getAppChatConfig({
           chatConfig: app.chatConfig,
@@ -17,6 +17,7 @@ import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
 import { getAppLatestVersion } from '@fastgpt/service/core/app/controller';
 import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
 import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
+import { transformPreviewHistories } from '@/global/core/chat/utils';

 export default async function handler(req: NextApiRequest, res: NextApiResponse) {
   try {
@@ -74,7 +75,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
       title: chat?.title || '新对话',
       userAvatar: team?.avatar,
       variables: chat?.variables || {},
-      history: histories,
+      history: app.type === AppTypeEnum.plugin ? histories : transformPreviewHistories(histories),
       app: {
         chatConfig: getAppChatConfig({
           chatConfig: app.chatConfig,
@@ -3,7 +3,6 @@ import NextHead from '@/components/common/NextHead';
 import { useRouter } from 'next/router';
 import { delChatRecordById, getChatHistories, getInitChatInfo } from '@/web/core/chat/api';
 import { Box, Flex, Drawer, DrawerOverlay, DrawerContent, useTheme } from '@chakra-ui/react';
 import { useSystemStore } from '@/web/common/system/useSystemStore';
 import { streamFetch } from '@/web/common/api/fetch';
 import { useChatStore } from '@/web/core/chat/context/storeChat';
 import { useToast } from '@fastgpt/web/hooks/useToast';
@@ -1,7 +1,6 @@
 import React, { useCallback, useRef, useState } from 'react';
 import { useRouter } from 'next/router';
 import { Box, Flex, Drawer, DrawerOverlay, DrawerContent } from '@chakra-ui/react';
 import { useSystemStore } from '@/web/common/system/useSystemStore';
 import { streamFetch } from '@/web/common/api/fetch';
 import { useShareChatStore } from '@/web/core/chat/storeShareChat';
 import SideBar from '@/components/SideBar';
@@ -4,7 +4,6 @@ import { delChatRecordById, getChatHistories, getTeamChatInfo } from '@/web/core
 import { useRouter } from 'next/router';
 import { Box, Flex, Drawer, DrawerOverlay, DrawerContent, useTheme } from '@chakra-ui/react';
 import { useToast } from '@fastgpt/web/hooks/useToast';
 import { useSystemStore } from '@/web/common/system/useSystemStore';
 import SideBar from '@/components/SideBar';
 import PageContainer from '@/components/PageContainer';
 import { getMyTokensApps } from '@/web/core/chat/api';
@@ -1,6 +1,6 @@
 import { GET, POST, DELETE, PUT } from '@/web/common/api/request';
-import type { ChatHistoryItemType, ChatAppListSchema } from '@fastgpt/global/core/chat/type.d';
-
+import type { ChatHistoryItemType, ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d';
+import { getResDataQuery } from '@/pages/api/core/chat/getResData';
 import type {
   CloseCustomFeedbackParams,
   InitChatProps,
@@ -9,6 +9,7 @@ import type {
   GetHistoriesProps,
   InitTeamChatProps
 } from '@/global/core/chat/api.d';

 import type {
   AdminUpdateFeedbackParams,
   ClearHistoriesProps,
@@ -29,13 +30,16 @@ export const getInitOutLinkChatInfo = (data: InitOutLinkChatProps) =>
   GET<InitChatResponse>(`/core/chat/outLink/init`, data);
 export const getTeamChatInfo = (data: InitTeamChatProps) =>
   GET<InitChatResponse>(`/core/chat/team/init`, data);

 /**
  * get current window history(appid or shareId)
  */
 export const getChatHistories = (data: GetHistoriesProps) =>
   POST<ChatHistoryItemType[]>('/core/chat/getHistories', data);

+/**
+ * get detail responseData by dataId appId chatId
+ */
+export const getChatResData = (data: getResDataQuery) =>
+  GET<ChatHistoryItemResType[]>(`/core/chat/getResData`, data);
 /**
  * delete one history
  */