Record scroll test (#2783)

* perf: history add scrollList (#2696)

* perf: chatHistorySlider add virtualList

* perf: chat records add scrollList

* delete console

* perf: ScrollData add ref props

* optimize code

* optimize code && add line breaks

* add total records display

* finish test

* perf: ScrollComponent load data

* perf: Scroll components load

* perf: scroll code

---------

Co-authored-by: papapatrick <109422393+Patrickill@users.noreply.github.com>
Authored by Archer on 2024-09-24 17:13:32 +08:00, committed by GitHub
parent f4d4d6516c
commit 434c03c955
46 changed files with 827 additions and 422 deletions

View File

@@ -9,23 +9,24 @@ import { MongoChat } from './chatSchema';
export async function getChatItems({
appId,
chatId,
limit = 30,
offset,
limit,
field
}: {
appId: string;
chatId?: string;
limit?: number;
offset: number;
limit: number;
field: string;
}): Promise<{ histories: ChatItemType[] }> {
}): Promise<{ histories: ChatItemType[]; total: number }> {
if (!chatId) {
return { histories: [] };
return { histories: [], total: 0 };
}
const histories = await MongoChatItem.find({ appId, chatId }, field)
.sort({ _id: -1 })
.limit(limit)
.lean();
const [histories, total] = await Promise.all([
MongoChatItem.find({ chatId, appId }, field).sort({ _id: -1 }).skip(offset).limit(limit).lean(),
MongoChatItem.countDocuments({ chatId, appId })
]);
histories.reverse();
histories.forEach((item) => {
@@ -33,7 +34,7 @@ export async function getChatItems({
item.value = adaptStringValue(item.value);
});
return { histories };
return { histories, total };
}
/* Temporary adaptation for old conversation records */
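
For reference, the controller now pages with an offset/limit pair and returns the total count alongside each page. A minimal caller sketch (illustrative, not from this diff; the page size and projection string are assumptions):

// Assumes: import { getChatItems } from '@fastgpt/service/core/chat/controller';
async function loadAllChatItems(appId: string, chatId: string) {
  const limit = 20; // assumed page size
  let offset = 0;
  let total = Infinity;

  while (offset < total) {
    const { histories, total: count } = await getChatItems({
      appId,
      chatId,
      offset,
      limit,
      field: 'dataId obj value' // assumed projection
    });
    if (histories.length === 0) break; // safety stop
    total = count; // total is now returned with every page
    offset += histories.length; // advance by what was actually fetched
    // each page is oldest-first within itself after the reverse()
  }
}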

View File

@@ -1,5 +1,5 @@
export type PaginationProps<T = {}> = T & {
current: number;
offset: number;
pageSize: number;
};
export type PaginationResponse<T = any> = {
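
The shared pagination request type now carries an absolute offset instead of a 1-based current page. A small sketch of building a request under the new contract (the helper and the appId field are illustrative):

// Illustrative helper: map a 1-based page to the new offset field.
function pageToOffset(current: number, pageSize: number): number {
  return (current - 1) * pageSize;
}

// e.g. page 3 with pageSize 20 -> offset 40
const body: PaginationProps<{ appId: string }> = {
  appId: 'xxx', // placeholder
  offset: pageToOffset(3, 20),
  pageSize: 20
};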

View File

@@ -1,7 +1,8 @@
import React, { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import React, { useEffect, useMemo, useRef, useState } from 'react';
import { useDisclosure, Button, ModalBody, ModalFooter } from '@chakra-ui/react';
import { useTranslation } from 'next-i18next';
import MyModal from '../components/common/MyModal';
import { useMemoizedFn } from 'ahooks';
export const useConfirm = (props?: {
title?: string;
@@ -44,7 +45,7 @@ export const useConfirm = (props?: {
const confirmCb = useRef<Function>();
const cancelCb = useRef<any>();
const openConfirm = useCallback(
const openConfirm = useMemoizedFn(
(confirm?: Function, cancel?: any, customContent?: string | React.ReactNode) => {
confirmCb.current = confirm;
cancelCb.current = cancel;
@@ -52,11 +53,10 @@ export const useConfirm = (props?: {
customContent && setCustomContent(customContent);
return onOpen;
},
[]
}
);
const ConfirmModal = useCallback(
const ConfirmModal = useMemoizedFn(
({
closeText = t('common:common.Cancel'),
confirmText = t('common:common.Confirm'),
@@ -128,8 +128,7 @@ export const useConfirm = (props?: {
)}
</MyModal>
);
},
[customContent, hideFooter, iconSrc, isOpen, map.variant, onClose, showCancel, t, title]
}
);
return {

View File

@@ -1,9 +1,10 @@
import { useRef, useState, useCallback, useMemo } from 'react';
import { useRef, useState, useCallback, RefObject, ReactNode, useMemo } from 'react';
import { IconButton, Flex, Box, Input, BoxProps } from '@chakra-ui/react';
import { ArrowBackIcon, ArrowForwardIcon } from '@chakra-ui/icons';
import { useTranslation } from 'next-i18next';
import { useToast } from './useToast';
import { getErrText } from '@fastgpt/global/common/error/utils';
import {
useBoolean,
useLockFn,
@@ -13,6 +14,8 @@ import {
useThrottleEffect
} from 'ahooks';
const thresholdVal = 200;
type PagingData<T> = {
pageNum: number;
pageSize: number;
@@ -27,7 +30,9 @@ export function usePagination<ResT = any>({
defaultRequest = true,
type = 'button',
onChange,
refreshDeps
refreshDeps,
scrollLoadType = 'bottom',
EmptyTip
}: {
api: (data: any) => Promise<PagingData<ResT>>;
pageSize?: number;
@@ -36,24 +41,26 @@ export function usePagination<ResT = any>({
type?: 'button' | 'scroll';
onChange?: (pageNum: number) => void;
refreshDeps?: any[];
throttleWait?: number;
scrollLoadType?: 'top' | 'bottom';
EmptyTip?: React.JSX.Element;
}) {
const { toast } = useToast();
const { t } = useTranslation();
const [pageNum, setPageNum] = useState(1);
const ScrollContainerRef = useRef<HTMLDivElement>(null);
const noMore = useRef(false);
const [isLoading, { setTrue, setFalse }] = useBoolean(false);
const [total, setTotal] = useState(0);
const [data, setData] = useState<ResT[]>([]);
const totalDataLength = useMemo(() => Math.max(total, data.length), [total, data.length]);
const maxPage = useMemo(() => Math.ceil(total / pageSize) || 1, [pageSize, total]);
const isEmpty = total === 0 && !isLoading;
const pageNum = useMemo(() => Math.ceil(data.length / pageSize), [data.length, pageSize]);
const noMore = data.length >= totalDataLength;
const fetchData = useLockFn(async (num: number = pageNum) => {
if (noMore.current && num !== 1) return;
const fetchData = useLockFn(
async (num: number = pageNum, ScrollContainerRef?: RefObject<HTMLDivElement>) => {
if (noMore && num !== 1) return;
setTrue();
try {
@@ -66,14 +73,30 @@ export function usePagination<ResT = any>({
// Check total and set
res.total !== undefined && setTotal(res.total);
if (res.total !== undefined && res.total <= data.length + res.data.length) {
noMore.current = true;
if (type === 'scroll') {
if (scrollLoadType === 'top') {
const prevHeight = ScrollContainerRef?.current?.scrollHeight || 0;
const prevScrollTop = ScrollContainerRef?.current?.scrollTop || 0;
// Use requestAnimationFrame to adjust the scroll position
function adjustScrollPosition() {
requestAnimationFrame(
ScrollContainerRef?.current
? () => {
if (ScrollContainerRef?.current) {
const newHeight = ScrollContainerRef.current.scrollHeight;
const heightDiff = newHeight - prevHeight;
ScrollContainerRef.current.scrollTop = prevScrollTop + heightDiff;
}
}
: adjustScrollPosition
);
}
setPageNum(num);
if (type === 'scroll') {
setData((prevData) => (num === 1 ? res.data : [...res.data, ...prevData]));
adjustScrollPosition();
} else {
setData((prevData) => (num === 1 ? res.data : [...prevData, ...res.data]));
}
} else {
setData(res.data);
}
@@ -88,9 +111,13 @@ export function usePagination<ResT = any>({
}
setFalse();
});
}
);
// Button pagination
const Pagination = useCallback(() => {
const maxPage = Math.ceil(totalDataLength / pageSize);
return (
<Flex alignItems={'center'} justifyContent={'end'}>
<IconButton
@@ -151,7 +178,74 @@ export function usePagination<ResT = any>({
/>
</Flex>
);
}, [isLoading, maxPage, fetchData, pageNum]);
}, [isLoading, totalDataLength, pageSize, fetchData, pageNum]);
// Scroll pagination
const DefaultRef = useRef<HTMLDivElement>(null);
const ScrollData = useMemoizedFn(
({
children,
ScrollContainerRef,
...props
}: {
children: ReactNode;
ScrollContainerRef?: RefObject<HTMLDivElement>;
} & BoxProps) => {
const ref = ScrollContainerRef || DefaultRef;
const loadText = (() => {
if (isLoading) return t('common:common.is_requesting');
if (noMore) return t('common:common.request_end');
return t('common:common.request_more');
})();
const scroll = useScroll(ref);
// Watch scroll position
useThrottleEffect(
() => {
if (!ref?.current || type !== 'scroll' || noMore) return;
const { scrollTop, scrollHeight, clientHeight } = ref.current;
if (
(scrollLoadType === 'bottom' &&
scrollTop + clientHeight >= scrollHeight - thresholdVal) ||
(scrollLoadType === 'top' && scrollTop < thresholdVal)
) {
fetchData(pageNum + 1, ref);
}
},
[scroll],
{ wait: 50 }
);
return (
<Box {...props} ref={ref} overflow={'overlay'}>
{scrollLoadType === 'top' && total > 0 && isLoading && (
<Box mt={2} fontSize={'xs'} color={'blackAlpha.500'} textAlign={'center'}>
{t('common:common.is_requesting')}
</Box>
)}
{children}
{scrollLoadType === 'bottom' && !isEmpty && (
<Box
mt={2}
fontSize={'xs'}
color={'blackAlpha.500'}
textAlign={'center'}
cursor={loadText === t('common:common.request_more') ? 'pointer' : 'default'}
onClick={() => {
if (loadText !== t('common:common.request_more')) return;
fetchData(pageNum + 1);
}}
>
{loadText}
</Box>
)}
{isEmpty && EmptyTip}
</Box>
);
}
);
// Reload data
const { runAsync: refresh } = useRequest(
@@ -166,53 +260,10 @@ export function usePagination<ResT = any>({
}
);
const ScrollData = useMemoizedFn(
({ children, ...props }: { children: React.ReactNode } & BoxProps) => {
const loadText = (() => {
if (isLoading) return t('common:common.is_requesting');
if (total <= data.length) return t('common:common.request_end');
return t('common:common.request_more');
})();
return (
<Box {...props} ref={ScrollContainerRef} overflow={'overlay'}>
{children}
<Box
mt={2}
fontSize={'xs'}
color={'blackAlpha.500'}
textAlign={'center'}
cursor={loadText === t('common:common.request_more') ? 'pointer' : 'default'}
onClick={() => {
if (loadText !== t('common:common.request_more')) return;
fetchData(pageNum + 1);
}}
>
{loadText}
</Box>
</Box>
);
}
);
// Scroll check
const scroll = useScroll(ScrollContainerRef);
useThrottleEffect(
() => {
if (!ScrollContainerRef?.current || type !== 'scroll' || total === 0) return;
const { scrollTop, scrollHeight, clientHeight } = ScrollContainerRef.current;
if (scrollTop + clientHeight >= scrollHeight - 100) {
fetchData(pageNum + 1);
}
},
[scroll],
{ wait: 50 }
);
return {
pageNum,
pageSize,
total,
total: totalDataLength,
data,
setData,
isLoading,
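
Taken together, a consumer of the reworked hook renders its list inside the ScrollData wrapper and lets the hook drive top-loading; noMore is now derived from data.length against the reported total instead of being tracked in a ref. A minimal sketch, assuming the hook still returns the ScrollData component it defines and that getRecordsApi, RecordType and RecordItem exist:

// Illustrative consumer; getRecordsApi returns Promise<PagingData<RecordType>>.
function RecordList() {
  const { ScrollData, data } = usePagination<RecordType>({
    api: getRecordsApi,
    pageSize: 20,
    type: 'scroll',
    scrollLoadType: 'top' // prepend older records and keep the viewport anchored
  });

  return (
    <ScrollData h={'100%'}>
      {data.map((item, i) => (
        <RecordItem key={i} record={item} />
      ))}
    </ScrollData>
  );
}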

View File

@@ -1,4 +1,4 @@
import React, { useRef, useState } from 'react';
import React, { ReactNode, RefObject, useMemo, useRef, useState } from 'react';
import { Box, BoxProps } from '@chakra-ui/react';
import { useToast } from './useToast';
import { getErrText } from '@fastgpt/global/common/error/utils';
@@ -16,6 +16,7 @@ import MyBox from '../components/common/MyBox';
import { useTranslation } from 'next-i18next';
type ItemHeight<T> = (index: number, data: T) => number;
const thresholdVal = 200;
export type ScrollListType = ({
children,
@@ -28,7 +29,7 @@ export type ScrollListType = ({
isLoading?: boolean;
} & BoxProps) => React.JSX.Element;
export function useScrollPagination<
export function useVirtualScrollPagination<
TParams extends PaginationProps,
TData extends PaginationResponse
>(
@@ -53,15 +54,14 @@ export function useScrollPagination<
const { t } = useTranslation();
const containerRef = useRef<HTMLDivElement>(null);
const wrapperRef = useRef(null);
const noMore = useRef(false);
const { toast } = useToast();
const [current, setCurrent] = useState(1);
const [data, setData] = useState<TData['list']>([]);
const [total, setTotal] = useState(0);
const [isLoading, { setTrue, setFalse }] = useBoolean(false);
const noMore = data.length >= total;
const [list] = useVirtualList<TData['list'][0]>(data, {
containerTarget: containerRef,
wrapperTarget: wrapperRef,
@@ -69,28 +69,26 @@ export function useScrollPagination<
overscan
});
const loadData = useLockFn(async (num: number = current) => {
if (noMore.current && num !== 1) return;
const loadData = useLockFn(async (init = false) => {
if (noMore && !init) return;
const offset = init ? 0 : data.length;
setTrue();
try {
const res = await api({
current: num,
offset,
pageSize,
...defaultParams
} as TParams);
setTotal(res.total);
setCurrent(num);
if (num === 1) {
if (offset === 0) {
// init or reload
setData(res.list);
noMore.current = res.list.length >= res.total;
} else {
const totalLength = data.length + res.list.length;
noMore.current = totalLength >= res.total;
setData((prev) => [...prev, ...res.list]);
}
} catch (error: any) {
@@ -125,7 +123,7 @@ export function useScrollPagination<
<MyBox isLoading={isLoading} ref={containerRef} overflow={'overlay'} {...props}>
<Box ref={wrapperRef}>
{children}
{noMore.current && list.length > 0 && (
{noMore && list.length > 0 && (
<Box py={4} textAlign={'center'} color={'myGray.600'} fontSize={'xs'}>
{t('common:common.No more data')}
</Box>
@@ -141,7 +139,7 @@ export function useScrollPagination<
// Reload data
useRequest(
async () => {
loadData(1);
loadData(true);
},
{
manual: false,
@@ -155,9 +153,9 @@ export function useScrollPagination<
() => {
if (!containerRef.current || list.length === 0) return;
const { scrollTop, scrollHeight, clientHeight } = containerRef.current;
console.log('=======', 111111);
if (scrollTop + clientHeight >= scrollHeight - 100) {
loadData(current + 1);
if (scrollTop + clientHeight >= scrollHeight - thresholdVal) {
loadData(false);
}
},
[scroll],
@@ -178,3 +176,178 @@ export function useScrollPagination<
scroll2Top
};
}
export function useScrollPagination<
TParams extends PaginationProps,
TData extends PaginationResponse
>(
api: (data: TParams) => Promise<TData>,
{
refreshDeps,
scrollLoadType = 'bottom',
pageSize = 10,
params = {},
EmptyTip
}: {
refreshDeps?: any[];
scrollLoadType?: 'top' | 'bottom';
pageSize?: number;
params?: Record<string, any>;
EmptyTip?: React.JSX.Element;
}
) {
const { t } = useTranslation();
const { toast } = useToast();
const [data, setData] = useState<TData['list']>([]);
const [total, setTotal] = useState(0);
const [isLoading, { setTrue, setFalse }] = useBoolean(false);
const isEmpty = total === 0 && !isLoading;
const noMore = data.length >= total;
const loadData = useLockFn(
async (init = false, ScrollContainerRef?: RefObject<HTMLDivElement>) => {
if (noMore && !init) return;
const offset = init ? 0 : data.length;
setTrue();
try {
const res = await api({
offset,
pageSize,
...params
} as TParams);
setTotal(res.total);
if (scrollLoadType === 'top') {
const prevHeight = ScrollContainerRef?.current?.scrollHeight || 0;
const prevScrollTop = ScrollContainerRef?.current?.scrollTop || 0;
// Use requestAnimationFrame to adjust the scroll position
function adjustScrollPosition() {
requestAnimationFrame(
ScrollContainerRef?.current
? () => {
if (ScrollContainerRef?.current) {
const newHeight = ScrollContainerRef.current.scrollHeight;
const heightDiff = newHeight - prevHeight;
ScrollContainerRef.current.scrollTop = prevScrollTop + heightDiff;
}
}
: adjustScrollPosition
);
}
setData((prevData) => (offset === 0 ? res.list : [...res.list, ...prevData]));
adjustScrollPosition();
} else {
setData((prevData) => (offset === 0 ? res.list : [...prevData, ...res.list]));
}
} catch (error: any) {
toast({
title: getErrText(error, t('common:core.chat.error.data_error')),
status: 'error'
});
console.log(error);
}
setFalse();
}
);
let ScrollRef = useRef<HTMLDivElement>(null);
const ScrollData = useMemoizedFn(
({
children,
ScrollContainerRef,
...props
}: {
children: ReactNode;
ScrollContainerRef?: RefObject<HTMLDivElement>;
} & BoxProps) => {
const ref = ScrollContainerRef || ScrollRef;
const loadText = useMemo(() => {
if (isLoading) return t('common:common.is_requesting');
if (noMore) return t('common:common.request_end');
return t('common:common.request_more');
}, [isLoading, noMore]);
const scroll = useScroll(ref);
// Watch scroll position
useThrottleEffect(
() => {
if (!ref?.current || noMore) return;
const { scrollTop, scrollHeight, clientHeight } = ref.current;
if (
(scrollLoadType === 'bottom' &&
scrollTop + clientHeight >= scrollHeight - thresholdVal) ||
(scrollLoadType === 'top' && scrollTop < thresholdVal)
) {
loadData(false, ref);
}
},
[scroll],
{ wait: 50 }
);
return (
<Box {...props} ref={ref} overflow={'overlay'}>
{scrollLoadType === 'top' && total > 0 && isLoading && (
<Box mt={2} fontSize={'xs'} color={'blackAlpha.500'} textAlign={'center'}>
{t('common:common.is_requesting')}
</Box>
)}
{children}
{scrollLoadType === 'bottom' && !isEmpty && (
<Box
mt={2}
fontSize={'xs'}
color={'blackAlpha.500'}
textAlign={'center'}
cursor={loadText === t('common:common.request_more') ? 'pointer' : 'default'}
onClick={() => {
if (loadText !== t('common:common.request_more')) return;
loadData(false);
}}
>
{loadText}
</Box>
)}
{isEmpty && EmptyTip}
</Box>
);
}
);
// Reload data
useRequest(
async () => {
loadData(true);
},
{
manual: false,
refreshDeps
}
);
const refreshList = useMemoizedFn(() => {
loadData(true);
});
return {
ScrollData,
isLoading,
total: Math.max(total, data.length),
data,
setData,
fetchData: loadData,
refreshList
};
}
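
Both hooks use the same trick for top-loading: snapshot the container's scrollHeight and scrollTop before prepending, then restore the position on the next animation frame so the viewport does not jump. A stripped-down sketch of the idea (standalone, names are illustrative):

// Illustrative only: keep the visual position stable while items are prepended.
function prependAndKeepScroll(container: HTMLDivElement, prepend: () => void) {
  const prevHeight = container.scrollHeight;
  const prevScrollTop = container.scrollTop;

  prepend(); // triggers the re-render that adds items above the viewport

  requestAnimationFrame(() => {
    // after layout, shift the scroll position by exactly the height that was added
    const heightDiff = container.scrollHeight - prevHeight;
    container.scrollTop = prevScrollTop + heightDiff;
  });
}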

View File

@@ -725,6 +725,7 @@
"core.module.template.empty_workflow": "空白工作流",
"core.module.template.http body placeholder": "与 Apifox 相同的语法",
"core.module.template.self_output": "插件输出",
"core.module.template.self_input": "插件输入",
"core.module.template.system_config": "系统配置",
"core.module.template.system_config_info": "可以配置应用的系统参数",
"core.module.template.work_start": "流程开始",

View File

@@ -1,4 +1,4 @@
import React, { useEffect, useMemo } from 'react';
import React, { useMemo } from 'react';
import { Box, Flex } from '@chakra-ui/react';
import { useRouter } from 'next/router';
import { useLoading } from '@fastgpt/web/hooks/useLoading';
@@ -12,7 +12,7 @@ import { useI18nLng } from '@fastgpt/web/hooks/useI18n';
import Auth from './auth';
import { useSystem } from '@fastgpt/web/hooks/useSystem';
import { useMount } from 'ahooks';
import { watchWindowHidden } from '@/web/common/system/utils';
const Navbar = dynamic(() => import('./navbar'));
const NavbarPhone = dynamic(() => import('./navbarPhone'));
const UpdateInviteModal = dynamic(() => import('@/components/support/user/team/UpdateInviteModal'));
@@ -70,14 +70,6 @@ const Layout = ({ children }: { children: JSX.Element }) => {
setUserDefaultLng();
});
// Add global listener
useEffect(() => {
document.addEventListener('visibilitychange', watchWindowHidden);
return () => {
document.removeEventListener('visibilitychange', watchWindowHidden);
};
});
return (
<>
<Box h={'100%'} bg={'myGray.100'}>

View File

@@ -28,7 +28,7 @@ import {
putChatInputGuide
} from '@/web/core/chat/inputGuide/api';
import { useQuery } from '@tanstack/react-query';
import { useScrollPagination } from '@fastgpt/web/hooks/useScrollPagination';
import { useVirtualScrollPagination } from '@fastgpt/web/hooks/useScrollPagination';
import EmptyTip from '@fastgpt/web/components/common/EmptyTip';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { useSelectFile } from '@/web/common/file/hooks/useSelectFile';
@@ -204,7 +204,7 @@ const LexiconConfigModal = ({ appId, onClose }: { appId: string; onClose: () =>
isLoading: isRequesting,
fetchData,
scroll2Top
} = useScrollPagination(getChatInputGuideList, {
} = useVirtualScrollPagination(getChatInputGuideList, {
refreshDeps: [searchKey],
// debounceWait: 300,

View File

@@ -17,7 +17,7 @@ import {
defaultWhisperConfig
} from '@fastgpt/global/core/app/constants';
import { createContext } from 'use-context-selector';
import { FieldValues, UseFormReturn } from 'react-hook-form';
import { UseFormReturn } from 'react-hook-form';
import { VariableInputEnum } from '@fastgpt/global/core/workflow/constants';
import { getChatResData } from '@/web/core/chat/api';
import { ChatBoxInputFormType } from './type';

View File

@@ -16,7 +16,7 @@ import type {
} from '@fastgpt/global/core/chat/type.d';
import { useToast } from '@fastgpt/web/hooks/useToast';
import { getErrText } from '@fastgpt/global/common/error/utils';
import { Box, Flex, Checkbox } from '@chakra-ui/react';
import { Box, Flex, Checkbox, BoxProps } from '@chakra-ui/react';
import { EventNameEnum, eventBus } from '@/web/common/utils/eventbus';
import { chats2GPTMessages } from '@fastgpt/global/core/chat/adapt';
import { useForm } from 'react-hook-form';
@@ -44,7 +44,11 @@ import ChatInput from './Input/ChatInput';
import ChatBoxDivider from '../../Divider';
import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { ChatItemValueTypeEnum, ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import {
ChatItemValueTypeEnum,
ChatRoleEnum,
ChatStatusEnum
} from '@fastgpt/global/core/chat/constants';
import {
checkIsInteractiveByHistories,
formatChatValue2InputType,
@@ -86,7 +90,13 @@ type Props = OutLinkChatAuthProps &
userAvatar?: string;
active?: boolean; // can use
appId: string;
ScrollData: ({
children,
...props
}: {
children: React.ReactNode;
ScrollContainerRef?: React.RefObject<HTMLDivElement>;
} & BoxProps) => React.JSX.Element;
// not chat test params
onStartChat?: (e: StartChatFnProps) => Promise<
@@ -113,7 +123,8 @@ const ChatBox = (
teamId,
teamToken,
onStartChat,
onDelMessage
onDelMessage,
ScrollData
}: Props,
ref: ForwardedRef<ComponentRef>
) => {
@@ -171,7 +182,7 @@ const ChatBox = (
const chatStarted = chatStartedWatch || chatHistories.length > 0 || variableList.length === 0;
// Scroll to the bottom
const scrollToBottom = useCallback((behavior: 'smooth' | 'auto' = 'smooth', delay = 0) => {
const scrollToBottom = useMemoizedFn((behavior: 'smooth' | 'auto' = 'smooth', delay = 0) => {
setTimeout(() => {
if (!ChatBoxRef.current) {
setTimeout(() => {
@@ -184,7 +195,7 @@ const ChatBox = (
});
}
}, delay);
}, []);
});
// While a reply is generating, check the current scroll position to decide whether to scroll to the bottom
const { run: generatingScroll } = useThrottleFn(
@@ -201,7 +212,7 @@ const ChatBox = (
}
);
const generatingMessage = useCallback(
const generatingMessage = useMemoizedFn(
({
event,
text = '',
@@ -311,13 +322,11 @@ const ChatBox = (
})
);
generatingScroll();
},
[generatingScroll, setChatHistories, splitText2Audio, variablesForm]
}
);
// Reset the input content
const resetInputVal = useCallback(
({ text = '', files = [] }: ChatBoxInputType) => {
const resetInputVal = useMemoizedFn(({ text = '', files = [] }: ChatBoxInputType) => {
if (!TextareaDom.current) return;
setValue('files', files);
setValue('input', text);
@@ -329,9 +338,7 @@ const ChatBox = (
text === '' ? textareaMinH : `${TextareaDom.current.scrollHeight}px`;
}
}, 100);
},
[setValue]
);
});
// create question guide
const createQuestionGuide = useCallback(
@@ -363,11 +370,11 @@ const ChatBox = (
);
/* Abort chat completions, questionGuide */
const abortRequest = useCallback(() => {
const abortRequest = useMemoizedFn(() => {
chatController.current?.abort('stop');
questionGuideController.current?.abort('stop');
pluginController.current?.abort('stop');
}, []);
});
/**
* user confirm send prompt
@@ -445,7 +452,7 @@ const ChatBox = (
]
: [])
] as UserChatItemValueItemType[],
status: 'finish'
status: ChatStatusEnum.finish
},
// In normal chat mode, append an AI item to receive the response message
{
@@ -459,7 +466,7 @@ const ChatBox = (
}
}
],
status: 'loading'
status: ChatStatusEnum.loading
}
];
@@ -506,7 +513,7 @@ const ChatBox = (
if (index !== state.length - 1) return item;
return {
...item,
status: 'finish',
status: ChatStatusEnum.finish,
responseData: item.responseData
? [...item.responseData, ...responseData]
: responseData
@@ -548,7 +555,7 @@ const ChatBox = (
if (index !== state.length - 1) return item;
return {
...item,
status: 'finish'
status: ChatStatusEnum.finish
};
})
);
@@ -806,7 +813,7 @@ const ChatBox = (
if (!chatContent) return;
return {
status: chatContent.status || 'loading',
status: chatContent.status || ChatStatusEnum.loading,
name: t(chatContent.moduleName || ('' as any)) || t('common:common.Loading')
};
}, [chatHistories, isChatting, t]);
@@ -854,14 +861,26 @@ const ChatBox = (
useImperativeHandle(ref, () => ({
restartChat() {
abortRequest();
setChatHistories([]);
setValue('chatStarted', false);
scrollToBottom('smooth', 500);
},
scrollToBottom(behavior = 'auto') {
scrollToBottom(behavior, 500);
}
}));
const RenderRecords = useMemo(() => {
return (
<Box ref={ChatBoxRef} flex={'1 0 0'} h={0} w={'100%'} overflow={'overlay'} px={[4, 0]} pb={3}>
<ScrollData
ScrollContainerRef={ChatBoxRef}
flex={'1 0 0'}
h={0}
w={'100%'}
overflow={'overlay'}
px={[4, 0]}
pb={3}
>
<Box id="chat-container" maxW={['100%', '92%']} h={'100%'} mx={'auto'}>
{showEmpty && <Empty />}
{!!welcomeText && <WelcomeBox welcomeText={welcomeText} />}
@@ -957,9 +976,10 @@ const ChatBox = (
))}
</Box>
</Box>
</Box>
</ScrollData>
);
}, [
ScrollData,
appAvatar,
chatForm,
chatHistories,

View File

@@ -40,4 +40,5 @@ export type SendPromptFnType = (
export type ComponentRef = {
restartChat: () => void;
scrollToBottom: (behavior?: 'smooth' | 'auto') => void;
};

View File

@@ -8,20 +8,23 @@ import {
SendPromptFnType
} from './ChatBox/type';
import { eventBus, EventNameEnum } from '@/web/common/utils/eventbus';
import { getChatRecords } from '@/web/core/chat/api';
import { ChatStatusEnum } from '@fastgpt/global/core/chat/constants';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { GetChatRecordsProps } from '@/global/core/chat/api';
import { useScrollPagination } from '@fastgpt/web/hooks/useScrollPagination';
import { PaginationResponse } from '../../../../../../../packages/web/common/fetch/type';
import type { getPaginationRecordsBody } from '@/pages/api/core/chat/getPaginationRecords';
export const useChat = () => {
const ChatBoxRef = useRef<ChatComponentRef>(null);
const [chatRecords, setChatRecords] = useState<ChatSiteItemType[]>([]);
const variablesForm = useForm<ChatBoxInputFormType>();
// plugin
const [pluginRunTab, setPluginRunTab] = useState<PluginRunBoxTabEnum>(PluginRunBoxTabEnum.input);
const resetChatRecords = useCallback(
(props?: { records?: ChatSiteItemType[]; variables?: Record<string, any> }) => {
const { records = [], variables = {} } = props || {};
setChatRecords(records);
const resetVariables = useCallback(
(props?: { variables?: Record<string, any> }) => {
const { variables = {} } = props || {};
// Reset to empty input
const data = variablesForm.getValues();
@@ -33,20 +36,11 @@ export const useChat = () => {
...data,
...variables
});
setTimeout(
() => {
ChatBoxRef.current?.restartChat?.();
},
ChatBoxRef.current?.restartChat ? 0 : 500
);
},
[variablesForm, setChatRecords]
[variablesForm]
);
const clearChatRecords = useCallback(() => {
setChatRecords([]);
const data = variablesForm.getValues();
for (const key in data) {
variablesForm.setValue(key, '');
@@ -55,15 +49,47 @@ export const useChat = () => {
ChatBoxRef.current?.restartChat?.();
}, [variablesForm]);
const useChatScrollData = useCallback((params: GetChatRecordsProps) => {
return useScrollPagination(
async (data: getPaginationRecordsBody): Promise<PaginationResponse<ChatSiteItemType>> => {
const res = await getChatRecords(data);
// First load scroll to bottom
if (data.offset === 0) {
function scrollToBottom() {
requestAnimationFrame(
ChatBoxRef?.current ? () => ChatBoxRef?.current?.scrollToBottom?.() : scrollToBottom
);
}
scrollToBottom();
}
return {
...res,
list: res.list.map((item) => ({
...item,
dataId: item.dataId || getNanoid(),
status: ChatStatusEnum.finish
}))
};
},
{
pageSize: 10,
refreshDeps: [params],
params,
scrollLoadType: 'top'
}
);
}, []);
return {
ChatBoxRef,
chatRecords,
setChatRecords,
variablesForm,
pluginRunTab,
setPluginRunTab,
clearChatRecords,
resetChatRecords
resetVariables,
useChatScrollData
};
};
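
useChatScrollData also relies on a small retry pattern for the first page: if the ChatBox ref is not mounted yet, the scroll-to-bottom callback re-queues itself on the next frame until it is. A generic sketch of that pattern (the helper name is an assumption):

import type { RefObject } from 'react';

// Illustrative: run fn once targetRef.current exists, checking one frame at a time.
function runWhenMounted<T>(targetRef: RefObject<T>, fn: (el: T) => void) {
  const tick = () =>
    requestAnimationFrame(() => {
      if (targetRef.current) fn(targetRef.current);
      else tick();
    });
  tick();
}

// e.g. runWhenMounted(ChatBoxRef, (box) => box.scrollToBottom?.());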

View File

@@ -10,7 +10,6 @@ import { UploadChunkItemType } from '@fastgpt/global/core/dataset/type';
import { DatasetCollectionSchemaType } from '@fastgpt/global/core/dataset/type';
import { PermissionTypeEnum } from '@fastgpt/global/support/permission/constant';
import type { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
import { PaginationProps } from '@fastgpt/web/common/fetch/type';
/* ===== dataset ===== */

View File

@@ -2,7 +2,8 @@ import type { AppChatConfigType, AppTTSConfigType } from '@fastgpt/global/core/a
import { AdminFbkType, ChatItemType } from '@fastgpt/global/core/chat/type';
import type { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat.d';
import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
import { RequestPaging } from '@/types';
import { GetChatTypeEnum } from '@/global/core/chat/constants';
export type GetChatSpeechProps = {
ttsConfig: AppTTSConfigType;
input: string;
@@ -15,6 +16,14 @@ export type InitChatProps = {
chatId?: string;
loadCustomFeedbacks?: boolean;
};
export type GetChatRecordsProps = OutLinkChatAuthProps & {
appId: string;
chatId?: string;
loadCustomFeedbacks?: boolean;
type: `${GetChatTypeEnum}`;
};
export type InitOutLinkChatProps = {
chatId?: string;
shareId: string;
@@ -32,7 +41,6 @@ export type InitChatResponse = {
userAvatar?: string;
title?: string;
variables: Record<string, any>;
history: ChatItemType[];
app: {
chatConfig?: AppChatConfigType;
chatModels?: string[];

View File

@@ -13,6 +13,11 @@ export const defaultChatData: InitChatResponse = {
pluginInputs: []
},
title: '',
variables: {},
history: []
variables: {}
};
export enum GetChatTypeEnum {
normal = 'normal',
outLink = 'outLink',
team = 'team'
}

View File

@@ -6,7 +6,7 @@ import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
const isLLMNode = (item: ChatHistoryItemResType) =>
item.moduleType === FlowNodeTypeEnum.chatNode || item.moduleType === FlowNodeTypeEnum.tools;
export function transformPreviewHistories(histories: ChatItemType[]) {
export function transformPreviewHistories(histories: ChatItemType[]): ChatItemType[] {
return histories.map((item) => {
return {
...addStatisticalDataToHistoryItem(item),

View File

@@ -1,7 +1,6 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { NextAPI } from '@/service/middleware/entry';
import { MongoAppVersion } from '@fastgpt/service/core/app/version/schema';
import { PaginationProps, PaginationResponse } from '@fastgpt/web/common/fetch/type';
import { authApp } from '@fastgpt/service/support/permission/app/auth';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';

View File

@@ -3,6 +3,7 @@ import { NextAPI } from '@/service/middleware/entry';
import { MongoAppVersion } from '@fastgpt/service/core/app/version/schema';
import { PaginationProps, PaginationResponse } from '@fastgpt/web/common/fetch/type';
import { AppVersionSchemaType } from '@fastgpt/global/core/app/version';
import { ApiRequestProps } from '@fastgpt/service/type/next';
type Props = PaginationProps<{
appId: string;
@@ -10,8 +11,8 @@ type Props = PaginationProps<{
type Response = PaginationResponse<AppVersionSchemaType>;
async function handler(req: NextApiRequest, res: NextApiResponse<any>): Promise<Response> {
const { current, pageSize, appId } = req.body as Props;
async function handler(req: ApiRequestProps<Props>, res: NextApiResponse<any>): Promise<Response> {
const { offset, pageSize, appId } = req.body;
const [result, total] = await Promise.all([
MongoAppVersion.find({
@@ -20,7 +21,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>): Promise<
.sort({
time: -1
})
.skip((current - 1) * pageSize)
.skip(offset)
.limit(pageSize),
MongoAppVersion.countDocuments({ appId })
]);

View File

@@ -2,6 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { NextAPI } from '@/service/middleware/entry';
import { MongoAppVersion } from '@fastgpt/service/core/app/version/schema';
import { PaginationProps, PaginationResponse } from '@fastgpt/web/common/fetch/type';
import { ApiRequestProps } from '@fastgpt/service/type/next';
type Props = PaginationProps<{
appId: string;
@@ -18,8 +19,8 @@ export type versionListResponse = {
type Response = PaginationResponse<versionListResponse>;
async function handler(req: NextApiRequest, res: NextApiResponse<any>): Promise<Response> {
const { current, pageSize, appId } = req.body as Props;
async function handler(req: ApiRequestProps<Props>, res: NextApiResponse<any>): Promise<Response> {
const { offset, pageSize, appId } = req.body;
const [result, total] = await Promise.all([
MongoAppVersion.find(
@@ -31,7 +32,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse<any>): Promise<
.sort({
time: -1
})
.skip((current - 1) * pageSize)
.skip(offset)
.limit(pageSize),
MongoAppVersion.countDocuments({ appId })
]);

View File

@@ -1,4 +1,3 @@
import { connectToDatabase } from '@/service/mongo';
import { MongoChat } from '@fastgpt/service/core/chat/chatSchema';
import { ChatSourceEnum } from '@fastgpt/global/core/chat/constants';
import { authOutLink } from '@/service/support/permission/auth/outLink';
@@ -18,7 +17,7 @@ async function handler(
req: ApiRequestProps<getHistoriesBody, getHistoriesQuery>,
res: ApiResponseType<any>
): Promise<PaginationResponse<getHistoriesResponse>> {
const { appId, shareId, outLinkUid, teamId, teamToken, current, pageSize } =
const { appId, shareId, outLinkUid, teamId, teamToken, offset, pageSize } =
req.body as getHistoriesBody;
const match = await (async () => {
@@ -63,7 +62,7 @@ async function handler(
const [data, total] = await Promise.all([
await MongoChat.find(match, 'chatId title top customTitle appId updateTime')
.sort({ top: -1, updateTime: -1 })
.skip((current - 1) * pageSize)
.skip(offset)
.limit(pageSize),
MongoChat.countDocuments(match)
]);

View File

@@ -0,0 +1,93 @@
import type { ApiRequestProps, ApiResponseType } from '@fastgpt/service/type/next';
import { NextAPI } from '@/service/middleware/entry';
import { GetChatRecordsProps } from '@/global/core/chat/api';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
import { transformPreviewHistories } from '@/global/core/chat/utils';
import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
import { getChatItems } from '@fastgpt/service/core/chat/controller';
import { authChatCrud } from '@/service/support/permission/auth/chat';
import { MongoApp } from '@fastgpt/service/core/app/schema';
import { AppErrEnum } from '@fastgpt/global/common/error/code/app';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { filterPublicNodeResponseData } from '@fastgpt/global/core/chat/utils';
import { authOutLink } from '@/service/support/permission/auth/outLink';
import { GetChatTypeEnum } from '@/global/core/chat/constants';
import { PaginationProps, PaginationResponse } from '@fastgpt/web/common/fetch/type';
import { ChatItemType } from '@fastgpt/global/core/chat/type';
export type getPaginationRecordsQuery = {};
export type getPaginationRecordsBody = PaginationProps & GetChatRecordsProps;
export type getPaginationRecordsResponse = PaginationResponse<ChatItemType>;
async function handler(
req: ApiRequestProps<getPaginationRecordsBody, getPaginationRecordsQuery>,
res: ApiResponseType<any>
): Promise<getPaginationRecordsResponse> {
const { chatId, appId, offset, pageSize = 10, loadCustomFeedbacks, type } = req.body;
if (!appId || !chatId) {
return {
list: [],
total: 0
};
}
const [app] = await Promise.all([
MongoApp.findById(appId, 'type').lean(),
authChatCrud({
req,
authToken: true,
...req.body,
per: ReadPermissionVal
})
]);
if (!app) {
return Promise.reject(AppErrEnum.unExist);
}
const isPlugin = app.type === AppTypeEnum.plugin;
const shareChat = await (async () => {
if (type === GetChatTypeEnum.outLink)
return await authOutLink({
shareId: req.body.shareId,
outLinkUid: req.body.outLinkUid
}).then((result) => result.shareChat);
})();
const fieldMap = {
[GetChatTypeEnum.normal]: `dataId obj value adminFeedback userBadFeedback userGoodFeedback ${
DispatchNodeResponseKeyEnum.nodeResponse
} ${loadCustomFeedbacks ? 'customFeedbacks' : ''}`,
[GetChatTypeEnum.outLink]: `dataId obj value userGoodFeedback userBadFeedback adminFeedback ${
shareChat?.responseDetail || isPlugin ? `${DispatchNodeResponseKeyEnum.nodeResponse}` : ''
} `,
[GetChatTypeEnum.team]: `dataId obj value userGoodFeedback userBadFeedback adminFeedback ${DispatchNodeResponseKeyEnum.nodeResponse}`
};
const { total, histories } = await getChatItems({
appId,
chatId,
field: fieldMap[type],
offset,
limit: pageSize
});
// Remove important information
if (type === 'outLink' && app.type !== AppTypeEnum.plugin) {
histories.forEach((item) => {
if (item.obj === ChatRoleEnum.AI) {
item.responseData = filterPublicNodeResponseData({ flowResponses: item.responseData });
}
});
}
return {
list: isPlugin ? histories : transformPreviewHistories(histories),
total
};
}
export default NextAPI(handler);
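
A hedged sketch of a first-page request against this handler and how a client keeps paging (ids are placeholders):

// Illustrative request body for the new records endpoint.
const firstPage: getPaginationRecordsBody = {
  appId: 'xxx', // placeholder ids
  chatId: 'yyy',
  offset: 0, // newest slice first
  pageSize: 10,
  type: GetChatTypeEnum.normal
};

// Response shape: { list: ChatItemType[]; total: number }.
// The query sorts by _id descending, so offset 0 is the newest pageSize records
// (reversed to oldest-first within the page); the client fetches again while
// offset + list.length < total, which is how useScrollPagination derives noMore.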

View File

@@ -5,15 +5,11 @@ import { getGuideModule, getAppChatConfig } from '@fastgpt/global/core/workflow/
import { getChatModelNameListByModules } from '@/service/core/app/workflow';
import type { InitChatProps, InitChatResponse } from '@/global/core/chat/api.d';
import { MongoChat } from '@fastgpt/service/core/chat/chatSchema';
import { getChatItems } from '@fastgpt/service/core/chat/controller';
import { ChatErrEnum } from '@fastgpt/global/common/error/code/chat';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
import { getAppLatestVersion } from '@fastgpt/service/core/app/controller';
import { NextAPI } from '@/service/middleware/entry';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
import { transformPreviewHistories } from '@/global/core/chat/utils';
import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
async function handler(
req: NextApiRequest,
@@ -45,17 +41,8 @@ async function handler(
}
// get app and history
const [{ histories }, { nodes, chatConfig }] = await Promise.all([
getChatItems({
appId,
chatId,
limit: 30,
field: `dataId obj value adminFeedback userBadFeedback userGoodFeedback ${
DispatchNodeResponseKeyEnum.nodeResponse
} ${loadCustomFeedbacks ? 'customFeedbacks' : ''}`
}),
getAppLatestVersion(app._id, app)
]);
const { nodes, chatConfig } = await getAppLatestVersion(app._id, app);
const pluginInputs =
app?.modules?.find((node) => node.flowNodeType === FlowNodeTypeEnum.pluginInput)?.inputs ?? [];
@@ -65,7 +52,6 @@ async function handler(
title: chat?.title,
userAvatar: undefined,
variables: chat?.variables || {},
history: app.type === AppTypeEnum.plugin ? histories : transformPreviewHistories(histories),
app: {
chatConfig: getAppChatConfig({
chatConfig,

View File

@@ -14,10 +14,10 @@ export type ChatInputGuideProps = PaginationProps<{
export type ChatInputGuideResponse = PaginationResponse<ChatInputGuideSchemaType>;
async function handler(
req: ApiRequestProps<{}, ChatInputGuideProps>,
req: ApiRequestProps<ChatInputGuideProps>,
res: NextApiResponse<any>
): Promise<ChatInputGuideResponse> {
const { appId, pageSize, current, searchKey } = req.query;
const { appId, pageSize, offset, searchKey } = req.body;
await authApp({ req, appId, authToken: true, per: ReadPermissionVal });
@@ -27,10 +27,7 @@ async function handler(
};
const [result, total] = await Promise.all([
MongoChatInputGuide.find(params)
.sort({ _id: -1 })
.skip(pageSize * (current - 1))
.limit(pageSize),
MongoChatInputGuide.find(params).sort({ _id: -1 }).skip(offset).limit(pageSize),
MongoChatInputGuide.countDocuments(params)
]);

View File

@@ -3,8 +3,6 @@ import { jsonRes } from '@fastgpt/service/common/response';
import type { InitChatResponse, InitOutLinkChatProps } from '@/global/core/chat/api.d';
import { getGuideModule, getAppChatConfig } from '@fastgpt/global/core/workflow/utils';
import { getChatModelNameListByModules } from '@/service/core/app/workflow';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
import { getChatItems } from '@fastgpt/service/core/chat/controller';
import { MongoTeamMember } from '@fastgpt/service/support/user/team/teamMemberSchema';
import { authOutLink } from '@/service/support/permission/auth/outLink';
import { MongoApp } from '@fastgpt/service/core/app/schema';
@@ -13,8 +11,6 @@ import { MongoChat } from '@fastgpt/service/core/chat/chatSchema';
import { ChatErrEnum } from '@fastgpt/global/common/error/code/chat';
import { getAppLatestVersion } from '@fastgpt/service/core/app/controller';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
import { transformPreviewHistories } from '@/global/core/chat/utils';
import { NextAPI } from '@/service/middleware/entry';
async function handler(req: NextApiRequest, res: NextApiResponse) {
@@ -39,19 +35,8 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
throw new Error(ChatErrEnum.unAuthChat);
}
const [{ histories }, { nodes, chatConfig }] = await Promise.all([
getChatItems({
appId: app._id,
chatId,
limit: 30,
field: `dataId obj value userGoodFeedback userBadFeedback ${
shareChat.responseDetail || app.type === AppTypeEnum.plugin
? `adminFeedback ${DispatchNodeResponseKeyEnum.nodeResponse}`
: ''
} `
}),
getAppLatestVersion(app._id, app)
]);
const { nodes, chatConfig } = await getAppLatestVersion(app._id, app);
// pick share response field
jsonRes<InitChatResponse>(res, {
data: {
@@ -61,7 +46,6 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
//@ts-ignore
userAvatar: tmb?.userId?.avatar,
variables: chat?.variables || {},
history: app.type === AppTypeEnum.plugin ? histories : transformPreviewHistories(histories),
app: {
chatConfig: getAppChatConfig({
chatConfig,

View File

@@ -2,19 +2,15 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { getGuideModule, getAppChatConfig } from '@fastgpt/global/core/workflow/utils';
import { getChatModelNameListByModules } from '@/service/core/app/workflow';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
import type { InitChatResponse, InitTeamChatProps } from '@/global/core/chat/api.d';
import { MongoChat } from '@fastgpt/service/core/chat/chatSchema';
import { MongoApp } from '@fastgpt/service/core/app/schema';
import { getChatItems } from '@fastgpt/service/core/chat/controller';
import { AppErrEnum } from '@fastgpt/global/common/error/code/app';
import { authTeamSpaceToken } from '@/service/support/permission/auth/team';
import { MongoTeam } from '@fastgpt/service/support/user/team/teamSchema';
import { ChatErrEnum } from '@fastgpt/global/common/error/code/chat';
import { getAppLatestVersion } from '@fastgpt/service/core/app/controller';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
import { transformPreviewHistories } from '@/global/core/chat/utils';
import { NextAPI } from '@/service/middleware/entry';
async function handler(req: NextApiRequest, res: NextApiResponse) {
@@ -45,15 +41,9 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
}
// get app and history
const [{ histories }, { nodes, chatConfig }] = await Promise.all([
getChatItems({
appId,
chatId,
limit: 30,
field: `dataId obj value userGoodFeedback userBadFeedback adminFeedback ${DispatchNodeResponseKeyEnum.nodeResponse}`
}),
getAppLatestVersion(app._id, app)
]);
const { nodes, chatConfig } = await getAppLatestVersion(app._id, app);
// pick share response field
jsonRes<InitChatResponse>(res, {
data: {
@@ -62,7 +52,6 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
title: chat?.title,
userAvatar: team?.avatar,
variables: chat?.variables || {},
history: app.type === AppTypeEnum.plugin ? histories : transformPreviewHistories(histories),
app: {
chatConfig: getAppChatConfig({
chatConfig,

View File

@@ -22,21 +22,22 @@ export type GetScrollCollectionsProps = PaginationProps<{
}>;
async function handler(
req: ApiRequestProps<{}, GetScrollCollectionsProps>
req: ApiRequestProps<GetScrollCollectionsProps, {}>
): Promise<PaginationResponse<DatasetCollectionsListItemType>> {
let {
datasetId,
pageSize = 10,
current = 1,
offset,
parentId = null,
searchText = '',
selectFolder = false,
filterTags = [],
simple = false
} = req.query;
} = req.body;
if (!datasetId) {
return Promise.reject(CommonErrEnum.missingParams);
}
searchText = searchText?.replace(/'/g, '');
pageSize = Math.min(pageSize, 30);
@@ -84,7 +85,7 @@ async function handler(
.sort({
updateTime: -1
})
.skip(pageSize * (current - 1))
.skip(offset)
.limit(pageSize)
.lean();
@@ -110,7 +111,7 @@ async function handler(
$sort: { updateTime: -1 }
},
{
$skip: (current - 1) * pageSize
$skip: offset
},
{
$limit: pageSize

View File

@@ -3,19 +3,20 @@ import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
import { replaceRegChars } from '@fastgpt/global/common/string/tools';
import { NextAPI } from '@/service/middleware/entry';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { PagingData, RequestPaging } from '@/types';
import { ApiRequestProps } from '@fastgpt/service/type/next';
import { DatasetDataListItemType } from '@/global/core/dataset/type';
import { PaginationProps, PaginationResponse } from '@fastgpt/web/common/fetch/type';
export type GetDatasetDataListProps = RequestPaging & {
export type GetDatasetDataListProps = PaginationProps & {
searchText?: string;
collectionId: string;
};
export type GetDatasetDataListRes = PaginationResponse<DatasetDataListItemType>;
async function handler(
req: ApiRequestProps<GetDatasetDataListProps>
): Promise<PagingData<DatasetDataListItemType>> {
let { pageNum = 1, pageSize = 10, searchText = '', collectionId } = req.body;
): Promise<GetDatasetDataListRes> {
let { offset, pageSize = 10, searchText = '', collectionId } = req.body;
pageSize = Math.min(pageSize, 30);
@@ -40,19 +41,17 @@ async function handler(
: {})
};
const [data, total] = await Promise.all([
const [list, total] = await Promise.all([
MongoDatasetData.find(match, '_id datasetId collectionId q a chunkIndex')
.sort({ chunkIndex: 1, updateTime: -1 })
.skip((pageNum - 1) * pageSize)
.skip(offset)
.limit(pageSize)
.lean(),
MongoDatasetData.countDocuments(match)
]);
return {
pageNum,
pageSize,
data,
list,
total
};
}

View File

@@ -208,6 +208,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
getChatItems({
appId: app._id,
chatId,
offset: 0,
limit,
field: `dataId obj value nodeOutputs`
}),
@@ -555,6 +556,7 @@ const authHeaderRequest = async ({
};
} else {
// token_auth
if (!appId) {
return Promise.reject('appId is empty');
}

View File

@@ -1,4 +1,4 @@
import React from 'react';
import React, { useMemo } from 'react';
import { Flex, Box, useTheme } from '@chakra-ui/react';
import { useTranslation } from 'next-i18next';
import { HUMAN_ICON } from '@fastgpt/global/common/system/constants';
@@ -16,6 +16,7 @@ import ChatBox from '@/components/core/chat/ChatContainer/ChatBox';
import { useSystem } from '@fastgpt/web/hooks/useSystem';
import { useQuery } from '@tanstack/react-query';
import { PcHeader } from '@/pages/chat/components/ChatHeader';
import { GetChatTypeEnum } from '@/global/core/chat/constants';
const PluginRunBox = dynamic(() => import('@/components/core/chat/ChatContainer/PluginRunBox'));
@@ -31,29 +32,36 @@ const DetailLogsModal = ({
const { t } = useTranslation();
const { isPc } = useSystem();
const theme = useTheme();
const params = useMemo(() => {
return {
chatId,
appId,
loadCustomFeedbacks: true,
type: GetChatTypeEnum.normal
};
}, [appId, chatId]);
const {
ChatBoxRef,
chatRecords,
setChatRecords,
variablesForm,
pluginRunTab,
setPluginRunTab,
resetChatRecords
resetVariables,
useChatScrollData
} = useChat();
const {
data: chatRecords,
ScrollData,
setData: setChatRecords,
total: totalRecordsCount
} = useChatScrollData(params);
const { data: chat, isFetching } = useQuery(
['getChatDetail', chatId],
() => getInitChatInfo({ appId, chatId, loadCustomFeedbacks: true }),
{
onSuccess(res) {
const history = res.history.map((item) => ({
...item,
dataId: item.dataId || getNanoid(),
status: 'finish' as any
}));
resetChatRecords({
records: history,
resetVariables({
variables: res.variables
});
}
@@ -63,11 +71,12 @@ const DetailLogsModal = ({
const title = chat?.title;
const chatModels = chat?.app?.chatModels;
const isPlugin = chat?.app.type === AppTypeEnum.plugin;
const loading = isFetching;
return (
<>
<MyBox
isLoading={isFetching}
isLoading={loading}
display={'flex'}
flexDirection={'column'}
zIndex={3}
@@ -124,7 +133,11 @@ const DetailLogsModal = ({
>
{isPc ? (
<>
<PcHeader title={title || ''} history={chatRecords} chatModels={chatModels} />
<PcHeader
totalRecordsCount={totalRecordsCount}
title={title || ''}
chatModels={chatModels}
/>
<Box flex={1} />
</>
) : (
@@ -157,6 +170,7 @@ const DetailLogsModal = ({
</Box>
) : (
<ChatBox
ScrollData={ScrollData}
ref={ChatBoxRef}
chatHistories={chatRecords}
setChatHistories={setChatRecords}

View File

@@ -1,6 +1,6 @@
import React, { useCallback, useState } from 'react';
import { getPublishList, postRevertVersion } from '@/web/core/app/api/version';
import { useScrollPagination } from '@fastgpt/web/hooks/useScrollPagination';
import { useVirtualScrollPagination } from '@fastgpt/web/hooks/useScrollPagination';
import CustomRightDrawer from '@fastgpt/web/components/common/MyDrawer/CustomRightDrawer';
import { useTranslation } from 'next-i18next';
import { Box, Button, Flex } from '@chakra-ui/react';
@@ -41,7 +41,7 @@ const PublishHistoriesSlider = ({
const [selectedHistoryId, setSelectedHistoryId] = useState<string>();
const { scrollDataList, ScrollList, isLoading } = useScrollPagination(getPublishList, {
const { scrollDataList, ScrollList, isLoading } = useVirtualScrollPagination(getPublishList, {
itemHeight: 49,
overscan: 20,

View File

@@ -4,7 +4,7 @@ import {
getWorkflowVersionList,
updateAppVersion
} from '@/web/core/app/api/version';
import { useScrollPagination } from '@fastgpt/web/hooks/useScrollPagination';
import { useVirtualScrollPagination } from '@fastgpt/web/hooks/useScrollPagination';
import CustomRightDrawer from '@fastgpt/web/components/common/MyDrawer/CustomRightDrawer';
import { useTranslation } from 'next-i18next';
import { Box, Button, Flex, Input } from '@chakra-ui/react';
@@ -180,7 +180,7 @@ const TeamCloud = () => {
const { loadAndGetTeamMembers } = useUserStore();
const { feConfigs } = useSystemStore();
const { scrollDataList, ScrollList, isLoading, fetchData } = useScrollPagination(
const { scrollDataList, ScrollList, isLoading, fetchData } = useVirtualScrollPagination(
getWorkflowVersionList,
{
itemHeight: 40,

View File

@@ -1,5 +1,5 @@
import { useUserStore } from '@/web/support/user/useUserStore';
import React, { useMemo } from 'react';
import React, { useCallback, useMemo, useState } from 'react';
import type { StartChatFnProps } from '@/components/core/chat/ChatContainer/type';
import { streamFetch } from '@/web/common/api/fetch';
import { getMaxHistoryLimitFromNodes } from '@fastgpt/global/core/workflow/runtime/utils';
@@ -12,9 +12,10 @@ import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
import dynamic from 'next/dynamic';
import { useChat } from '@/components/core/chat/ChatContainer/useChat';
import { Box } from '@chakra-ui/react';
import { Box, BoxProps } from '@chakra-ui/react';
import { AppChatConfigType } from '@fastgpt/global/core/app/type';
import ChatBox from '@/components/core/chat/ChatContainer/ChatBox';
import { ChatSiteItemType } from '@fastgpt/global/core/chat/type';
const PluginRunBox = dynamic(() => import('@/components/core/chat/ChatContainer/PluginRunBox'));
@@ -29,6 +30,7 @@ export const useChatTest = ({
}) => {
const { userInfo } = useUserStore();
const { appDetail } = useContextSelector(AppContext, (v) => v);
const [chatRecords, setChatRecords] = useState<ChatSiteItemType[]>([]);
const startChat = useMemoizedFn(
async ({ messages, controller, generatingMessage, variables }: StartChatFnProps) => {
@@ -60,15 +62,26 @@ export const useChatTest = ({
return nodes.find((node) => node.flowNodeType === FlowNodeTypeEnum.pluginInput)?.inputs || [];
}, [nodes]);
const {
ChatBoxRef,
chatRecords,
setChatRecords,
variablesForm,
pluginRunTab,
setPluginRunTab,
clearChatRecords
} = useChat();
const { ChatBoxRef, variablesForm, pluginRunTab, setPluginRunTab, clearChatRecords } = useChat();
// Mock ScrollData
const ScrollData = useCallback(
({
children,
ScrollContainerRef,
...props
}: {
ScrollContainerRef?: React.RefObject<HTMLDivElement>;
children: React.ReactNode;
} & BoxProps) => {
return (
<Box ref={ScrollContainerRef} {...props} overflow={'overlay'}>
{children}
</Box>
);
},
[]
);
const CustomChatContainer = useMemoizedFn(() =>
appDetail.type === AppTypeEnum.plugin ? (
@@ -82,13 +95,17 @@ export const useChatTest = ({
chatConfig={appDetail.chatConfig}
tab={pluginRunTab}
setTab={setPluginRunTab}
onNewChat={clearChatRecords}
onNewChat={() => {
clearChatRecords();
setChatRecords([]);
}}
onStartChat={startChat}
/>
</Box>
) : (
<ChatBox
ref={ChatBoxRef}
ScrollData={ScrollData}
chatHistories={chatRecords}
setChatHistories={setChatRecords}
variablesForm={variablesForm}

View File

@@ -28,13 +28,15 @@ const ChatHeader = ({
history,
showHistory,
apps,
onRouteToAppDetail
onRouteToAppDetail,
totalRecordsCount
}: {
history: ChatItemType[];
showHistory?: boolean;
chatData: InitChatResponse;
apps?: AppListItemType[];
onRouteToAppDetail?: () => void;
totalRecordsCount: number;
}) => {
const { t } = useTranslation();
const isPlugin = chatData.app.type === AppTypeEnum.plugin;
@@ -52,9 +54,9 @@ const ChatHeader = ({
{isPc ? (
<>
<PcHeader
totalRecordsCount={totalRecordsCount}
title={chatData.title || t('common:core.chat.New Chat')}
chatModels={chatData.app.chatModels}
history={history}
/>
<Box flex={1} />
</>
@@ -91,6 +93,7 @@ const MobileDrawer = ({
const router = useRouter();
const isTeamChat = router.pathname === '/chat/team';
const [currentTab, setCurrentTab] = useState<TabEnum>(TabEnum.recently);
const getAppList = useCallback(async ({ parentId }: GetResourceFolderListProps) => {
return getMyApps({ parentId }).then((res) =>
res.map<GetResourceListItemResponse>((item) => ({
@@ -102,6 +105,7 @@ const MobileDrawer = ({
);
}, []);
const { onChangeAppId } = useContextSelector(ChatContext, (v) => v);
const onclickApp = (id: string) => {
onChangeAppId(id);
onCloseDrawer();
@@ -251,13 +255,14 @@ const MobileHeader = ({
export const PcHeader = ({
title,
chatModels,
history
totalRecordsCount
}: {
title: string;
chatModels?: string[];
history: ChatItemType[];
totalRecordsCount: number;
}) => {
const { t } = useTranslation();
return (
<>
<Box mr={3} maxW={'200px'} className="textEllipsis" color={'myGray.1000'}>
@@ -266,9 +271,9 @@ export const PcHeader = ({
<MyTag>
<MyIcon name={'history'} w={'14px'} />
<Box ml={1}>
{history.length === 0
{totalRecordsCount === 0
? t('common:core.chat.New Chat')
: t('common:core.chat.History Amount', { amount: history.length })}
: t('common:core.chat.History Amount', { amount: totalRecordsCount })}
</Box>
</MyTag>
{!!chatModels && chatModels.length > 0 && (

View File

@@ -1,7 +1,7 @@
import React, { useCallback, useMemo, useState } from 'react';
import NextHead from '@/components/common/NextHead';
import { useRouter } from 'next/router';
import { delChatRecordById, getChatHistories, getInitChatInfo } from '@/web/core/chat/api';
import { delChatRecordById, getInitChatInfo } from '@/web/core/chat/api';
import { Box, Flex, Drawer, DrawerOverlay, DrawerContent, useTheme } from '@chakra-ui/react';
import { streamFetch } from '@/web/common/api/fetch';
import { useChatStore } from '@/web/core/chat/context/storeChat';
@@ -16,17 +16,15 @@ import SliderApps from './components/SliderApps';
import ChatHeader from './components/ChatHeader';
import { useUserStore } from '@/web/support/user/useUserStore';
import { serviceSideProps } from '@/web/common/utils/i18n';
import { checkChatSupportSelectFileByChatModels } from '@/web/core/chat/utils';
import { getChatTitleFromChatMessage } from '@fastgpt/global/core/chat/utils';
import { ChatStatusEnum } from '@fastgpt/global/core/chat/constants';
import { GPTMessages2Chats } from '@fastgpt/global/core/chat/adapt';
import { getMyApps } from '@/web/core/app/api';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import { useMount } from 'ahooks';
import { useCreation, useMount } from 'ahooks';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { InitChatResponse } from '@/global/core/chat/api';
import { defaultChatData } from '@/global/core/chat/constants';
import { defaultChatData, GetChatTypeEnum } from '@/global/core/chat/constants';
import ChatContextProvider, { ChatContext } from '@/web/core/chat/context/chatContext';
import { AppListItemType } from '@fastgpt/global/core/app/type';
import { useContextSelector } from 'use-context-selector';
@@ -35,6 +33,7 @@ import dynamic from 'next/dynamic';
import { useChat } from '@/components/core/chat/ChatContainer/useChat';
import ChatBox from '@/components/core/chat/ChatContainer/ChatBox';
import { useSystem } from '@fastgpt/web/hooks/useSystem';
import { InitChatResponse } from '@/global/core/chat/api';
const CustomPluginRunBox = dynamic(() => import('./components/CustomPluginRunBox'));
@@ -50,12 +49,11 @@ const Chat = ({
const router = useRouter();
const theme = useTheme();
const { t } = useTranslation();
const { userInfo } = useUserStore();
const { isPc } = useSystem();
const { setLastChatAppId } = useChatStore();
const {
setHistories: setRecordHistories,
loadHistories: loadRecordHistories,
histories: recordHistories,
onUpdateHistory,
onClearHistories,
onDelHistory,
@@ -65,39 +63,43 @@ const Chat = ({
onChangeChatId,
onUpdateHistoryTitle
} = useContextSelector(ChatContext, (v) => v);
const params = useCreation(() => {
return {
chatId,
appId,
type: GetChatTypeEnum.normal
};
}, [appId, chatId]);
const {
ChatBoxRef,
chatRecords,
setChatRecords,
variablesForm,
pluginRunTab,
setPluginRunTab,
resetChatRecords
resetVariables,
useChatScrollData
} = useChat();
const { userInfo } = useUserStore();
const { isPc } = useSystem();
const {
data: chatRecords,
ScrollData,
setData: setChatRecords,
total: totalRecordsCount
} = useChatScrollData(params);
// get chat app info
const [chatData, setChatData] = useState<InitChatResponse>(defaultChatData);
const isPlugin = chatData.app.type === AppTypeEnum.plugin;
const { loading } = useRequest2(
// Load chat init data
const { loading: isLoading } = useRequest2(
async () => {
if (!appId || forbidLoadChat.current) return;
const res = await getInitChatInfo({ appId, chatId });
setChatData(res);
const history = res.history.map((item) => ({
...item,
dataId: item.dataId || getNanoid(),
status: ChatStatusEnum.finish
}));
// reset chat records
resetChatRecords({
records: history,
// reset chat variables
resetVariables({
variables: res.variables
});
@@ -162,6 +164,7 @@ const Chat = ({
},
[chatId, appId, onUpdateHistoryTitle, forbidLoadChat, onChangeChatId]
);
const loading = isLoading;
return (
<Flex h={'100%'}>
@@ -223,6 +226,7 @@ const Chat = ({
>
{/* header */}
<ChatHeader
totalRecordsCount={totalRecordsCount}
apps={myApps}
chatData={chatData}
history={chatRecords}
@@ -247,6 +251,7 @@ const Chat = ({
/>
) : (
<ChatBox
ScrollData={ScrollData}
ref={ChatBoxRef}
chatHistories={chatRecords}
setChatHistories={setChatRecords}
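
Taken together, the chat page no longer pulls its record list from the init response: a stable params object is handed to the scroll hook exposed by useChat, and the hook's outputs feed both the header and the chat box. A minimal sketch of that wiring, not part of the diff, using the names from the hunks above (option values and comments are illustrative):

// Sketch only: how the pieces above fit together.
// useChat, useCreation, GetChatTypeEnum and the prop names come from this diff;
// everything else is illustrative.
const { ChatBoxRef, variablesForm, resetVariables, useChatScrollData } = useChat();

// Keep the request params referentially stable so the hook only refetches
// when appId or chatId actually change.
const params = useCreation(
  () => ({ chatId, appId, type: GetChatTypeEnum.normal }),
  [appId, chatId]
);

const {
  data: chatRecords,        // the paged chat records
  ScrollData,               // scroll container that loads further pages on demand
  setData: setChatRecords,  // mutate the local list without a refetch
  total: totalRecordsCount  // total record count, used by the header badge
} = useChatScrollData(params);

// <ChatHeader totalRecordsCount={totalRecordsCount} ... />
// <ChatBox ScrollData={ScrollData} chatHistories={chatRecords} ... />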

View File

@@ -28,7 +28,7 @@ import NextHead from '@/components/common/NextHead';
import { useContextSelector } from 'use-context-selector';
import ChatContextProvider, { ChatContext } from '@/web/core/chat/context/chatContext';
import { InitChatResponse } from '@/global/core/chat/api';
import { defaultChatData } from '@/global/core/chat/constants';
import { defaultChatData, GetChatTypeEnum } from '@/global/core/chat/constants';
import { useMount } from 'ahooks';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
@@ -92,13 +92,27 @@ const OutLink = ({
const {
ChatBoxRef,
chatRecords,
setChatRecords,
variablesForm,
pluginRunTab,
setPluginRunTab,
resetChatRecords
resetVariables,
useChatScrollData
} = useChat();
const params = useMemo(() => {
return {
chatId,
shareId,
outLinkUid,
appId: chatData.appId,
type: GetChatTypeEnum.outLink
};
}, [chatData.appId, chatId, outLinkUid, shareId]);
const {
data: chatRecords,
ScrollData,
setData: setChatRecords,
total: totalRecordsCount
} = useChatScrollData(params);
const startChat = useCallback(
async ({
@@ -179,7 +193,7 @@ const OutLink = ({
]
);
const { loading } = useRequest2(
const { loading: isLoading } = useRequest2(
async () => {
if (!shareId || !outLinkUid || forbidLoadChat.current) return;
@@ -190,14 +204,7 @@ const OutLink = ({
});
setChatData(res);
const history = res.history.map((item) => ({
...item,
dataId: item.dataId || nanoid(),
status: ChatStatusEnum.finish
}));
resetChatRecords({
records: history,
resetVariables({
variables: res.variables
});
},
@@ -229,6 +236,7 @@ const OutLink = ({
useMount(() => {
setIdEmbed(window !== top);
});
const loading = isLoading;
return (
<>
@@ -303,6 +311,7 @@ const OutLink = ({
<ChatHeader
chatData={chatData}
history={chatRecords}
totalRecordsCount={totalRecordsCount}
showHistory={showHistory === '1'}
/>
) : null}
@@ -322,6 +331,7 @@ const OutLink = ({
/>
) : (
<ChatBox
ScrollData={ScrollData}
ref={ChatBoxRef}
chatHistories={chatRecords}
setChatHistories={setChatRecords}

View File

@@ -26,7 +26,7 @@ import ChatContextProvider, { ChatContext } from '@/web/core/chat/context/chatCo
import { AppListItemType } from '@fastgpt/global/core/app/type';
import { useContextSelector } from 'use-context-selector';
import { InitChatResponse } from '@/global/core/chat/api';
import { defaultChatData } from '@/global/core/chat/constants';
import { defaultChatData, GetChatTypeEnum } from '@/global/core/chat/constants';
import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { useChat } from '@/components/core/chat/ChatContainer/useChat';
@@ -58,7 +58,6 @@ const Chat = ({ myApps }: { myApps: AppListItemType[] }) => {
const {
onUpdateHistoryTitle,
loadHistories,
onUpdateHistory,
onClearHistories,
onDelHistory,
@@ -70,13 +69,27 @@ const Chat = ({ myApps }: { myApps: AppListItemType[] }) => {
const {
ChatBoxRef,
chatRecords,
setChatRecords,
variablesForm,
pluginRunTab,
setPluginRunTab,
resetChatRecords
resetVariables,
useChatScrollData
} = useChat();
const params = useMemo(() => {
return {
appId,
chatId,
teamId,
teamToken,
type: GetChatTypeEnum.team
};
}, [appId, chatId, teamId, teamToken]);
const {
data: chatRecords,
ScrollData,
setData: setChatRecords,
total: totalRecordsCount
} = useChatScrollData(params);
const startChat = useCallback(
async ({
@@ -138,22 +151,15 @@ const Chat = ({ myApps }: { myApps: AppListItemType[] }) => {
);
// get chat app info
const { loading } = useRequest2(
const { loading: isLoading } = useRequest2(
async () => {
if (!appId || forbidLoadChat.current) return;
const res = await getTeamChatInfo({ teamId, appId, chatId, teamToken });
setChatData(res);
const history = res.history.map((item) => ({
...item,
dataId: item.dataId || nanoid(),
status: ChatStatusEnum.finish
}));
// reset chat records
resetChatRecords({
records: history,
resetVariables({
variables: res.variables
});
},
@@ -175,6 +181,8 @@ const Chat = ({ myApps }: { myApps: AppListItemType[] }) => {
}
);
const loading = isLoading;
return (
<Flex h={'100%'}>
<NextHead title={chatData.app.name} icon={chatData.app.avatar}></NextHead>
@@ -235,7 +243,13 @@ const Chat = ({ myApps }: { myApps: AppListItemType[] }) => {
flexDirection={'column'}
>
{/* header */}
<ChatHeader apps={myApps} chatData={chatData} history={chatRecords} showHistory />
<ChatHeader
totalRecordsCount={totalRecordsCount}
apps={myApps}
chatData={chatData}
history={chatRecords}
showHistory
/>
{/* chat box */}
<Box flex={1}>
{chatData.app.type === AppTypeEnum.plugin ? (
@@ -253,6 +267,7 @@ const Chat = ({ myApps }: { myApps: AppListItemType[] }) => {
) : (
<ChatBox
ref={ChatBoxRef}
ScrollData={ScrollData}
chatHistories={chatRecords}
setChatHistories={setChatRecords}
variablesForm={variablesForm}

View File

@@ -18,7 +18,7 @@ import {
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import MyInput from '@/components/MyInput';
import { DatasetTagType } from '@fastgpt/global/core/dataset/type';
import { ScrollListType, useScrollPagination } from '@fastgpt/web/hooks/useScrollPagination';
import { ScrollListType, useVirtualScrollPagination } from '@fastgpt/web/hooks/useScrollPagination';
import EmptyTip from '@fastgpt/web/components/common/EmptyTip';
import PopoverConfirm from '@fastgpt/web/components/common/MyPopover/PopoverConfirm';
import { DatasetCollectionsListItemType } from '@/global/core/dataset/type';
@@ -127,7 +127,7 @@ const TagManageModal = ({ onClose }: { onClose: () => void }) => {
isLoading: isRequesting,
fetchData,
total: tagsTotal
} = useScrollPagination(getDatasetCollectionTags, {
} = useVirtualScrollPagination(getDatasetCollectionTags, {
refreshDeps: [''],
// debounceWait: 300,
@@ -146,7 +146,7 @@ const TagManageModal = ({ onClose }: { onClose: () => void }) => {
scrollDataList: collectionsList,
ScrollList: ScrollListCollections,
isLoading: collectionsListLoading
} = useScrollPagination(getScrollCollectionList, {
} = useVirtualScrollPagination(getScrollCollectionList, {
refreshDeps: [searchText],
// debounceWait: 300,

View File

@@ -15,7 +15,6 @@ import MyIcon from '@fastgpt/web/components/common/Icon';
import MyInput from '@/components/MyInput';
import InputDataModal from '../components/InputDataModal';
import RawSourceBox from '@/components/core/dataset/RawSourceBox';
import { usePagination } from '@fastgpt/web/hooks/usePagination';
import { getCollectionSourceData } from '@fastgpt/global/core/dataset/collection/utils';
import EmptyTip from '@fastgpt/web/components/common/EmptyTip';
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';
@@ -27,7 +26,8 @@ import TagsPopOver from './CollectionCard/TagsPopOver';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import MyDivider from '@fastgpt/web/components/common/MyDivider';
import Markdown from '@/components/Markdown';
import { DatasetDataListItemType } from '@/global/core/dataset/type';
import { useMemoizedFn } from 'ahooks';
import { useScrollPagination } from '@fastgpt/web/hooks/useScrollPagination';
const DataCard = () => {
const theme = useTheme();
@@ -43,10 +43,6 @@ const DataCard = () => {
const { t } = useTranslation();
const [searchText, setSearchText] = useState('');
const { toast } = useToast();
const { openConfirm, ConfirmModal } = useConfirm({
content: t('common:dataset.Confirm to delete the data'),
type: 'delete'
});
const scrollParams = useMemo(
() => ({
@@ -55,19 +51,21 @@ const DataCard = () => {
}),
[collectionId, searchText]
);
const EmptyTipDom = useMemo(
() => <EmptyTip text={t('common:core.dataset.data.Empty Tip')} />,
[t]
);
const {
data: datasetDataList,
ScrollData,
total,
isLoading,
refresh,
refreshList,
setData: setDatasetDataList
} = usePagination<DatasetDataListItemType>({
api: getDatasetDataList,
pageSize: 10,
type: 'scroll',
} = useScrollPagination(getDatasetDataList, {
pageSize: 15,
params: scrollParams,
refreshDeps: [searchText, collectionId]
refreshDeps: [searchText, collectionId],
EmptyTip: EmptyTipDom
});
const [editDataId, setEditDataId] = useState<string>();
@@ -89,8 +87,32 @@ const DataCard = () => {
const canWrite = useMemo(() => datasetDetail.permission.hasWritePer, [datasetDetail]);
const { openConfirm, ConfirmModal } = useConfirm({
content: t('common:dataset.Confirm to delete the data'),
type: 'delete'
});
const onDeleteOneData = useMemoizedFn((dataId: string) => {
openConfirm(async () => {
try {
await delOneDatasetDataById(dataId);
setDatasetDataList((prev) => {
return prev.filter((data) => data._id !== dataId);
});
toast({
title: t('common:common.Delete Success'),
status: 'success'
});
} catch (error) {
toast({
title: getErrText(error),
status: 'error'
});
}
})();
});
return (
<MyBox position={'relative'} py={[1, 0]} h={'100%'}>
<MyBox py={[1, 0]} h={'100%'}>
<Flex flexDirection={'column'} h={'100%'}>
{/* Header */}
<Flex alignItems={'center'} px={6}>
@@ -163,7 +185,7 @@ const DataCard = () => {
/>
</Flex>
{/* data */}
<ScrollData flex={'1 0 0'} px={5} pb={5}>
<ScrollData px={5} pb={5}>
<Flex flexDir={'column'} gap={2}>
{datasetDataList.map((item, index) => (
<Card
@@ -185,7 +207,6 @@ const DataCard = () => {
}}
onClick={(e) => {
e.stopPropagation();
if (!collection) return;
setEditDataId(item._id);
}}
>
@@ -277,23 +298,7 @@ const DataCard = () => {
size={'xsSquare'}
onClick={(e) => {
e.stopPropagation();
openConfirm(async () => {
try {
await delOneDatasetDataById(item._id);
setDatasetDataList((prev) => {
return prev.filter((data) => data._id !== item._id);
});
toast({
title: t('common:common.Delete Success'),
status: 'success'
});
} catch (error) {
toast({
title: getErrText(error),
status: 'error'
});
}
})();
onDeleteOneData(item._id);
}}
aria-label={''}
/>
@@ -303,9 +308,6 @@ const DataCard = () => {
))}
</Flex>
</ScrollData>
{total === 0 && !isLoading && (
<EmptyTip text={t('common:core.dataset.data.Empty Tip')}></EmptyTip>
)}
</Flex>
{editDataId !== undefined && collection && (
@@ -315,7 +317,7 @@ const DataCard = () => {
onClose={() => setEditDataId(undefined)}
onSuccess={(data) => {
if (editDataId === '') {
refresh();
refreshList();
return;
}
setDatasetDataList((prev) => {
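
One detail of the DataCard refactor worth noting: the empty state is now handed to the pagination hook as a memoized element, which is why the manual total === 0 render after the list could be removed. A minimal sketch of that pattern, assuming the hook renders the element itself when the list comes back empty:

// Sketch only, assuming the hook shows EmptyTip when there is no data.
const EmptyTipDom = useMemo(
  () => <EmptyTip text={t('common:core.dataset.data.Empty Tip')} />,
  [t]
);

const {
  data: datasetDataList,
  ScrollData,                  // scroll container wrapping the card list
  total,
  refreshList,                 // refetch the list, used after inserting a new record
  setData: setDatasetDataList  // local updates, e.g. removing a deleted item
} = useScrollPagination(getDatasetDataList, {
  pageSize: 15,
  params: scrollParams,
  refreshDeps: [searchText, collectionId],
  EmptyTip: EmptyTipDom
});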

View File

@@ -29,6 +29,5 @@ declare global {
umami?: {
track: (event: TrackEventName, data: any) => void;
};
windowHidden: boolean;
}
}

View File

@@ -100,7 +100,7 @@ export const streamFetch = ({
return finish();
}
window.windowHidden
document.hidden
? setTimeout(animateResponseText, 16)
: requestAnimationFrame(animateResponseText);
}
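
The window.windowHidden global is dropped in favour of reading document.hidden directly. The reason for the branch itself is unchanged: browsers pause requestAnimationFrame callbacks while a tab is hidden, so the typing animation falls back to a timer there. A condensed sketch of the scheduling decision (the function name is illustrative):

// Sketch of the scheduling choice shown above: rAF does not fire in hidden
// tabs, so fall back to a timer to keep draining the buffered response text.
function scheduleNextTick(animate: () => void) {
  if (document.hidden) {
    // roughly one frame of delay; background tabs may clamp timers further
    setTimeout(animate, 16);
  } else {
    requestAnimationFrame(animate);
  }
}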

View File

@@ -13,8 +13,3 @@ export const getWebLLMModel = (model?: string) => {
const list = useSystemStore.getState().llmModelList;
return list.find((item) => item.model === model || item.name === model) ?? list[0];
};
export const watchWindowHidden = () => {
// @ts-ignore
window.windowHidden = document.hidden;
};

View File

@@ -1,5 +1,10 @@
import { GET, POST, DELETE, PUT } from '@/web/common/api/request';
import type { ChatHistoryItemType, ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d';
import type {
ChatHistoryItemType,
ChatHistoryItemResType,
ChatSiteItemType,
ChatItemType
} from '@fastgpt/global/core/chat/type.d';
import { getResDataQuery } from '@/pages/api/core/chat/getResData';
import type {
CloseCustomFeedbackParams,
@@ -21,6 +26,10 @@ import { UpdateChatFeedbackProps } from '@fastgpt/global/core/chat/api';
import { AuthTeamTagTokenProps } from '@fastgpt/global/support/user/team/tag';
import { AppListItemType } from '@fastgpt/global/core/app/type';
import { PaginationProps, PaginationResponse } from '@fastgpt/web/common/fetch/type';
import type {
getPaginationRecordsBody,
getPaginationRecordsResponse
} from '@/pages/api/core/chat/getPaginationRecords';
/**
 * Get initial chat content
@@ -31,6 +40,7 @@ export const getInitOutLinkChatInfo = (data: InitOutLinkChatProps) =>
GET<InitChatResponse>(`/core/chat/outLink/init`, data);
export const getTeamChatInfo = (data: InitTeamChatProps) =>
GET<InitChatResponse>(`/core/chat/team/init`, data);
/**
* get current window history(appid or shareId)
*/
@@ -41,6 +51,10 @@ export const getChatHistories = (data: PaginationProps<GetHistoriesProps>) =>
*/
export const getChatResData = (data: getResDataQuery) =>
GET<ChatHistoryItemResType[]>(`/core/chat/getResData`, data);
export const getChatRecords = (data: getPaginationRecordsBody) =>
  POST<getPaginationRecordsResponse>('/core/chat/getPaginationRecords', data);
/**
* delete one history
*/
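
Chat records are now fetched through a dedicated paginated endpoint instead of arriving with the init response. The exact request body is defined by getPaginationRecordsBody (not shown in this diff), so the fields below are an assumption based on the params objects built in the chat pages and the shared PaginationProps type; the response is likewise assumed to follow the shared PaginationResponse shape (a list plus a total):

// Hypothetical call shape, not taken verbatim from the diff.
const { list, total } = await getChatRecords({
  appId,
  chatId,
  type: GetChatTypeEnum.normal, // outLink/team pages pass their own type and auth fields
  offset: 0,                    // records to skip
  pageSize: 30                  // records per request (value illustrative)
});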

View File

@@ -13,7 +13,7 @@ import { ClearHistoriesProps, DelHistoryProps, UpdateHistoryProps } from '@/glob
import { BoxProps, useDisclosure } from '@chakra-ui/react';
import { useChatStore } from './storeChat';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { useScrollPagination } from '@fastgpt/web/hooks/useScrollPagination';
import { useVirtualScrollPagination } from '@fastgpt/web/hooks/useScrollPagination';
type ChatContextValueType = {
params: Record<string, string | number>;
@@ -111,7 +111,7 @@ const ChatContextProvider = ({
setData: setHistories,
fetchData: loadHistories,
totalData: histories
} = useScrollPagination(getChatHistories, {
} = useVirtualScrollPagination(getChatHistories, {
overscan: 30,
pageSize: 30,
itemHeight: 52,
@@ -132,7 +132,6 @@ const ChatContextProvider = ({
}
});
}
onCloseSlider();
},
[chatId, onCloseSlider, router, setLastChatId]
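
The history slider keeps fixed-height rows, so it moves to the virtualized variant of the hook. The options in the hunk above are what drive the virtualization; a short annotated sketch follows, with the comments being interpretation rather than part of the diff:

// Sketch of the virtualized pagination setup used for the history slider.
const {
  ScrollList,               // virtualized list container component
  isLoading,
  setData: setHistories,
  fetchData: loadHistories,
  totalData: histories      // the accumulated history items
} = useVirtualScrollPagination(getChatHistories, {
  overscan: 30,   // extra rows kept rendered above/below the viewport
  pageSize: 30,   // records fetched per request
  itemHeight: 52  // fixed pixel height of one row, needed for virtualization
});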

View File

@@ -25,7 +25,7 @@ export const getCountChatInputGuideTotal = (data: countChatInputGuideTotalQuery)
* Get chat input guide list
*/
export const getChatInputGuideList = (data: ChatInputGuideProps) =>
GET<ChatInputGuideResponse>(`/core/chat/inputGuide/list`, data);
POST<ChatInputGuideResponse>(`/core/chat/inputGuide/list`, data);
export const queryChatInputGuideList = (data: QueryChatInputGuideBody, url?: string) => {
if (url) {

View File

@@ -48,11 +48,14 @@ import type {
import type { readCollectionSourceResponse } from '@/pages/api/core/dataset/collection/read';
import type { GetDatasetListBody } from '@/pages/api/core/dataset/list';
import type { UpdateDatasetCollectionParams } from '@/pages/api/core/dataset/collection/update';
import type { GetDatasetDataListProps } from '@/pages/api/core/dataset/data/list';
import type {
GetDatasetDataListProps,
GetDatasetDataListRes
} from '@/pages/api/core/dataset/data/list';
import type { UpdateDatasetDataProps } from '@fastgpt/global/core/dataset/controller';
import type { DatasetFolderCreateBody } from '@/pages/api/core/dataset/folder/create';
import { PaginationProps, PaginationResponse } from '@fastgpt/web/common/fetch/type';
import { GetScrollCollectionsProps } from '@/pages/api/core/dataset/collection/scrollList';
import type { PaginationProps, PaginationResponse } from '@fastgpt/web/common/fetch/type';
import type { GetScrollCollectionsProps } from '@/pages/api/core/dataset/collection/scrollList';
/* ======================== dataset ======================= */
export const getDatasets = (data: GetDatasetListBody) =>
@@ -142,13 +145,13 @@ export const getDatasetCollectionTags = (
datasetId: string;
searchText?: string;
}>
) => GET<PaginationResponse<DatasetTagType>>(`/proApi/core/dataset/tag/list`, data);
) => POST<PaginationResponse<DatasetTagType>>(`/proApi/core/dataset/tag/list`, data);
export const getTagUsage = (datasetId: string) =>
GET<TagUsageType[]>(`/proApi/core/dataset/tag/tagUsage?datasetId=${datasetId}`);
export const getAllTags = (datasetId: string) =>
GET<{ list: DatasetTagType[] }>(`/proApi/core/dataset/tag/getAllTags?datasetId=${datasetId}`);
export const getScrollCollectionList = (data: GetScrollCollectionsProps) =>
GET<PaginationResponse<DatasetCollectionsListItemType>>(
POST<PaginationResponse<DatasetCollectionsListItemType>>(
`/core/dataset/collection/scrollList`,
data
);
@@ -156,7 +159,7 @@ export const getScrollCollectionList = (data: GetScrollCollectionsProps) =>
/* =============================== data ==================================== */
/* get dataset list */
export const getDatasetDataList = (data: GetDatasetDataListProps) =>
POST<PagingData<DatasetDataListItemType>>(`/core/dataset/data/list`, data);
POST<GetDatasetDataListRes>(`/core/dataset/data/list`, data);
export const getDatasetDataItemById = (id: string) =>
GET<DatasetDataItemType>(`/core/dataset/data/detail`, { id });
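
The tag and collection list endpoints switch from GET to POST here, presumably so the offset-based pagination fields can travel in the request body rather than the query string. An illustrative call shape follows; the real tag list call site appears further down in this diff:

// Illustrative offset-based pagination body (values are examples only).
const { list } = await getDatasetCollectionTags({
  datasetId,
  searchText: '',
  offset: 0,    // number of records to skip
  pageSize: 15  // records per request
});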

View File

@@ -142,7 +142,7 @@ export const DatasetPageContextProvider = ({
const { list } = await getDatasetCollectionTags({
datasetId: datasetDetail._id,
searchText: searchTagKey,
current: 1,
offset: 0,
pageSize: 15
});
return list;