perf: save chat and delete chat content by ID; UI tweaks

This commit is contained in:
archer
2023-04-30 13:18:54 +08:00
parent c971adaabd
commit f109f1cf60
19 changed files with 100 additions and 95 deletions

View File

@@ -31,5 +31,5 @@ export const postSaveChat = (data: {
/** /**
* 删除一句对话 * 删除一句对话
*/ */
export const delChatRecordByIndex = (chatId: string, index: number) => export const delChatRecordByIndex = (chatId: string, contentId: string) =>
DELETE(`/chat/delChatRecordByIndex?chatId=${chatId}&index=${index}`); DELETE(`/chat/delChatRecordByContentId?chatId=${chatId}&contentId=${contentId}`);

View File

@@ -2,7 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase } from '@/service/mongo'; import { connectToDatabase } from '@/service/mongo';
import { getOpenAIApi, authChat } from '@/service/utils/auth'; import { getOpenAIApi, authChat } from '@/service/utils/auth';
import { axiosConfig, openaiChatFilter, systemPromptFilter } from '@/service/utils/tools'; import { axiosConfig, openaiChatFilter, systemPromptFilter } from '@/service/utils/tools';
import { ChatItemType } from '@/types/chat'; import { ChatItemSimpleType } from '@/types/chat';
import { jsonRes } from '@/service/response'; import { jsonRes } from '@/service/response';
import { PassThrough } from 'stream'; import { PassThrough } from 'stream';
import { modelList, ModelVectorSearchModeMap, ModelVectorSearchModeEnum } from '@/constants/model'; import { modelList, ModelVectorSearchModeMap, ModelVectorSearchModeEnum } from '@/constants/model';
@@ -28,7 +28,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
try { try {
const { chatId, prompt, modelId } = req.body as { const { chatId, prompt, modelId } = req.body as {
prompt: ChatItemType; prompt: ChatItemSimpleType;
modelId: string; modelId: string;
chatId: '' | string; chatId: '' | string;
}; };
@@ -118,7 +118,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const filterPrompts = openaiChatFilter({ const filterPrompts = openaiChatFilter({
model: model.chat.chatModel, model: model.chat.chatModel,
prompts, prompts,
maxTokens: modelConstantsData.contextMaxToken - 500 maxTokens: modelConstantsData.contextMaxToken - 300
}); });
// 计算温度 // 计算温度

View File

@@ -5,13 +5,13 @@ import { authToken } from '@/service/utils/auth';
export default async function handler(req: NextApiRequest, res: NextApiResponse) { export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try { try {
const { chatId, index } = req.query as { chatId: string; index: string }; const { chatId, contentId } = req.query as { chatId: string; contentId: string };
const { authorization } = req.headers; const { authorization } = req.headers;
if (!authorization) { if (!authorization) {
throw new Error('无权操作'); throw new Error('无权操作');
} }
if (!chatId || !index) { if (!chatId || !contentId) {
throw new Error('缺少参数'); throw new Error('缺少参数');
} }
@@ -26,30 +26,13 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
throw new Error('找不到对话'); throw new Error('找不到对话');
} }
// 重新计算 index跳过已经被删除的内容 // 删除一条数据库记录
let unDeleteIndex = +index;
let deletedIndex = 0;
for (deletedIndex = 0; deletedIndex < chatRecord.content.length; deletedIndex++) {
if (!chatRecord.content[deletedIndex].deleted) {
unDeleteIndex--;
if (unDeleteIndex < 0) {
break;
}
}
}
// 删除最一条数据库记录, 也就是预发送的那一条
await Chat.updateOne( await Chat.updateOne(
{ {
_id: chatId, _id: chatId,
userId userId
}, },
{ { $pull: { content: { _id: contentId } } }
$set: {
[`content.${deletedIndex}.deleted`]: true,
updateTime: Date.now()
}
}
); );
jsonRes(res); jsonRes(res);

View File

@@ -36,20 +36,22 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
userId: new mongoose.Types.ObjectId(userId) userId: new mongoose.Types.ObjectId(userId)
} }
}, },
{ $unwind: '$content' },
{ $match: { 'content.deleted': false } },
{ $sort: { 'content._id': -1 } },
{ $limit: 50 },
{ {
$project: { $project: {
id: '$content._id', content: {
$slice: ['$content', -50] // 返回 content 数组的最后50个元素
}
}
},
{ $unwind: '$content' },
{
$project: {
_id: '$content._id',
obj: '$content.obj', obj: '$content.obj',
value: '$content.value' value: '$content.value'
} }
} }
]); ]);
history.reverse();
} }
jsonRes<InitChatResponse>(res, { jsonRes<InitChatResponse>(res, {

View File

@@ -4,6 +4,7 @@ import { ChatItemType } from '@/types/chat';
import { connectToDatabase, Chat } from '@/service/mongo'; import { connectToDatabase, Chat } from '@/service/mongo';
import { authModel } from '@/service/utils/auth'; import { authModel } from '@/service/utils/auth';
import { authToken } from '@/service/utils/auth'; import { authToken } from '@/service/utils/auth';
import mongoose from 'mongoose';
/* 聊天内容存存储 */ /* 聊天内容存存储 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) { export default async function handler(req: NextApiRequest, res: NextApiResponse) {
@@ -23,6 +24,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
await connectToDatabase(); await connectToDatabase();
const content = prompts.map((item) => ({ const content = prompts.map((item) => ({
_id: new mongoose.Types.ObjectId(item._id),
obj: item.obj, obj: item.obj,
value: item.value value: item.value
})); }));

View File

@@ -2,7 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase } from '@/service/mongo'; import { connectToDatabase } from '@/service/mongo';
import { getOpenAIApi, authOpenApiKey, authModel } from '@/service/utils/auth'; import { getOpenAIApi, authOpenApiKey, authModel } from '@/service/utils/auth';
import { axiosConfig, openaiChatFilter, systemPromptFilter } from '@/service/utils/tools'; import { axiosConfig, openaiChatFilter, systemPromptFilter } from '@/service/utils/tools';
import { ChatItemType } from '@/types/chat'; import { ChatItemSimpleType } from '@/types/chat';
import { jsonRes } from '@/service/response'; import { jsonRes } from '@/service/response';
import { PassThrough } from 'stream'; import { PassThrough } from 'stream';
import { modelList, ModelVectorSearchModeMap, ModelVectorSearchModeEnum } from '@/constants/model'; import { modelList, ModelVectorSearchModeMap, ModelVectorSearchModeEnum } from '@/constants/model';
@@ -32,7 +32,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
modelId, modelId,
isStream = true isStream = true
} = req.body as { } = req.body as {
prompts: ChatItemType[]; prompts: ChatItemSimpleType[];
modelId: string; modelId: string;
isStream: boolean; isStream: boolean;
}; };
@@ -132,7 +132,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const filterPrompts = openaiChatFilter({ const filterPrompts = openaiChatFilter({
model: model.chat.chatModel, model: model.chat.chatModel,
prompts, prompts,
maxTokens: modelConstantsData.contextMaxToken - 500 maxTokens: modelConstantsData.contextMaxToken - 300
}); });
// 计算温度 // 计算温度

View File

@@ -2,7 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase, Model } from '@/service/mongo'; import { connectToDatabase, Model } from '@/service/mongo';
import { getOpenAIApi, authOpenApiKey } from '@/service/utils/auth'; import { getOpenAIApi, authOpenApiKey } from '@/service/utils/auth';
import { axiosConfig, openaiChatFilter } from '@/service/utils/tools'; import { axiosConfig, openaiChatFilter } from '@/service/utils/tools';
import { ChatItemType } from '@/types/chat'; import { ChatItemSimpleType } from '@/types/chat';
import { jsonRes } from '@/service/response'; import { jsonRes } from '@/service/response';
import { PassThrough } from 'stream'; import { PassThrough } from 'stream';
import { modelList } from '@/constants/model'; import { modelList } from '@/constants/model';
@@ -31,7 +31,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
modelId, modelId,
isStream = true isStream = true
} = req.body as { } = req.body as {
prompts: ChatItemType[]; prompts: ChatItemSimpleType[];
modelId: string; modelId: string;
isStream: boolean; isStream: boolean;
}; };
@@ -77,7 +77,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const filterPrompts = openaiChatFilter({ const filterPrompts = openaiChatFilter({
model: model.chat.chatModel, model: model.chat.chatModel,
prompts, prompts,
maxTokens: modelConstantsData.contextMaxToken - 500 maxTokens: modelConstantsData.contextMaxToken - 300
}); });
// console.log(filterPrompts); // console.log(filterPrompts);

View File

@@ -2,7 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase, Model } from '@/service/mongo'; import { connectToDatabase, Model } from '@/service/mongo';
import { getOpenAIApi, authOpenApiKey } from '@/service/utils/auth'; import { getOpenAIApi, authOpenApiKey } from '@/service/utils/auth';
import { axiosConfig, openaiChatFilter, systemPromptFilter } from '@/service/utils/tools'; import { axiosConfig, openaiChatFilter, systemPromptFilter } from '@/service/utils/tools';
import { ChatItemType } from '@/types/chat'; import { ChatItemSimpleType } from '@/types/chat';
import { jsonRes } from '@/service/response'; import { jsonRes } from '@/service/response';
import { PassThrough } from 'stream'; import { PassThrough } from 'stream';
import { modelList, ModelVectorSearchModeMap, ChatModelEnum } from '@/constants/model'; import { modelList, ModelVectorSearchModeMap, ChatModelEnum } from '@/constants/model';
@@ -32,7 +32,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
modelId, modelId,
isStream = true isStream = true
} = req.body as { } = req.body as {
prompt: ChatItemType; prompt: ChatItemSimpleType;
modelId: string; modelId: string;
isStream: boolean; isStream: boolean;
}; };
@@ -156,7 +156,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const filterPrompts = openaiChatFilter({ const filterPrompts = openaiChatFilter({
model: model.chat.chatModel, model: model.chat.chatModel,
prompts, prompts,
maxTokens: modelConstantsData.contextMaxToken - 500 maxTokens: modelConstantsData.contextMaxToken - 300
}); });
// console.log(filterPrompts); // console.log(filterPrompts);

View File

@@ -2,7 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase, Model } from '@/service/mongo'; import { connectToDatabase, Model } from '@/service/mongo';
import { axiosConfig, systemPromptFilter, openaiChatFilter } from '@/service/utils/tools'; import { axiosConfig, systemPromptFilter, openaiChatFilter } from '@/service/utils/tools';
import { getOpenAIApi, authOpenApiKey } from '@/service/utils/auth'; import { getOpenAIApi, authOpenApiKey } from '@/service/utils/auth';
import { ChatItemType } from '@/types/chat'; import { ChatItemSimpleType } from '@/types/chat';
import { jsonRes } from '@/service/response'; import { jsonRes } from '@/service/response';
import { PassThrough } from 'stream'; import { PassThrough } from 'stream';
import { modelList, ModelVectorSearchModeMap, ModelVectorSearchModeEnum } from '@/constants/model'; import { modelList, ModelVectorSearchModeMap, ModelVectorSearchModeEnum } from '@/constants/model';
@@ -32,7 +32,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
modelId, modelId,
isStream = true isStream = true
} = req.body as { } = req.body as {
prompts: ChatItemType[]; prompts: ChatItemSimpleType[];
modelId: string; modelId: string;
isStream: boolean; isStream: boolean;
}; };
@@ -127,7 +127,7 @@ ${
const filterPrompts = openaiChatFilter({ const filterPrompts = openaiChatFilter({
model: model.chat.chatModel, model: model.chat.chatModel,
prompts, prompts,
maxTokens: modelConstantsData.contextMaxToken - 500 maxTokens: modelConstantsData.contextMaxToken - 300
}); });
// console.log(filterPrompts); // console.log(filterPrompts);

View File

@@ -29,8 +29,7 @@ import { streamFetch } from '@/api/fetch';
import Icon from '@/components/Icon'; import Icon from '@/components/Icon';
import MyIcon from '@/components/Icon'; import MyIcon from '@/components/Icon';
import { throttle } from 'lodash'; import { throttle } from 'lodash';
import { customAlphabet } from 'nanoid'; import mongoose from 'mongoose';
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 5);
const SlideBar = dynamic(() => import('./components/SlideBar')); const SlideBar = dynamic(() => import('./components/SlideBar'));
const Empty = dynamic(() => import('./components/Empty')); const Empty = dynamic(() => import('./components/Empty'));
@@ -41,7 +40,6 @@ import styles from './index.module.scss';
const textareaMinH = '22px'; const textareaMinH = '22px';
export type ChatSiteItemType = { export type ChatSiteItemType = {
id: string;
status: 'loading' | 'finish'; status: 'loading' | 'finish';
} & ChatItemType; } & ChatItemType;
@@ -136,10 +134,8 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
setChatData({ setChatData({
...res, ...res,
history: res.history.map((item: any, i) => ({ history: res.history.map((item) => ({
obj: item.obj, ...item,
value: item.value,
id: item.id || `${nanoid()}-${i}`,
status: 'finish' status: 'finish'
})) }))
}); });
@@ -191,15 +187,15 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
// gpt 对话 // gpt 对话
const gptChatPrompt = useCallback( const gptChatPrompt = useCallback(
async (prompts: ChatSiteItemType) => { async (prompts: ChatSiteItemType[]) => {
// create abort obj // create abort obj
const abortSignal = new AbortController(); const abortSignal = new AbortController();
controller.current = abortSignal; controller.current = abortSignal;
isResetPage.current = false; isResetPage.current = false;
const prompt = { const prompt = {
obj: prompts.obj, obj: prompts[0].obj,
value: prompts.value value: prompts[0].value
}; };
// 流请求,获取数据 // 流请求,获取数据
@@ -238,8 +234,13 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
modelId, modelId,
chatId, chatId,
prompts: [ prompts: [
prompt,
{ {
_id: prompts[0]._id,
obj: 'Human',
value: prompt.value
},
{
_id: prompts[1]._id,
obj: 'AI', obj: 'AI',
value: responseText value: responseText
} }
@@ -299,13 +300,13 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
const newChatList: ChatSiteItemType[] = [ const newChatList: ChatSiteItemType[] = [
...chatData.history, ...chatData.history,
{ {
id: nanoid(), _id: String(new mongoose.Types.ObjectId()),
obj: 'Human', obj: 'Human',
value: val, value: val,
status: 'finish' status: 'finish'
}, },
{ {
id: nanoid(), _id: String(new mongoose.Types.ObjectId()),
obj: 'AI', obj: 'AI',
value: '', value: '',
status: 'loading' status: 'loading'
@@ -325,7 +326,7 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
}, 100); }, 100);
try { try {
await gptChatPrompt(newChatList[newChatList.length - 2]); await gptChatPrompt(newChatList.slice(-2));
} catch (err: any) { } catch (err: any) {
toast({ toast({
title: typeof err === 'string' ? err : err?.message || '聊天出错了~', title: typeof err === 'string' ? err : err?.message || '聊天出错了~',
@@ -345,11 +346,11 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
// 删除一句话 // 删除一句话
const delChatRecord = useCallback( const delChatRecord = useCallback(
async (index: number) => { async (index: number, id: string) => {
setLoading(true); setLoading(true);
try { try {
// 删除数据库最后一句 // 删除数据库最后一句
await delChatRecordByIndex(chatId, index); await delChatRecordByIndex(chatId, id);
setChatData((state) => ({ setChatData((state) => ({
...state, ...state,
@@ -449,7 +450,7 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
<Box ref={ChatBox} pb={[4, 0]} flex={'1 0 0'} h={0} w={'100%'} overflowY={'auto'}> <Box ref={ChatBox} pb={[4, 0]} flex={'1 0 0'} h={0} w={'100%'} overflowY={'auto'}>
{chatData.history.map((item, index) => ( {chatData.history.map((item, index) => (
<Box <Box
key={item.id} key={item._id}
py={media(9, 6)} py={media(9, 6)}
px={media(4, 2)} px={media(4, 2)}
backgroundColor={ backgroundColor={
@@ -475,7 +476,7 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
</MenuButton> </MenuButton>
<MenuList fontSize={'sm'}> <MenuList fontSize={'sm'}>
<MenuItem onClick={() => onclickCopy(item.value)}></MenuItem> <MenuItem onClick={() => onclickCopy(item.value)}></MenuItem>
<MenuItem onClick={() => delChatRecord(index)}></MenuItem> <MenuItem onClick={() => delChatRecord(index, item._id)}></MenuItem>
</MenuList> </MenuList>
</Menu> </Menu>
<Box flex={'1 0 0'} w={0} overflow={'hidden'} id={`chat${index}`}> <Box flex={'1 0 0'} w={0} overflow={'hidden'} id={`chat${index}`}>
@@ -507,7 +508,7 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
_hover={{ _hover={{
color: 'red.600' color: 'red.600'
}} }}
onClick={() => delChatRecord(index)} onClick={() => delChatRecord(index, item._id)}
/> />
</Flex> </Flex>
)} )}

View File

@@ -78,7 +78,7 @@ const RegisterForm = ({ setPageType, loginSuccess }: Props) => {
FastGPT FastGPT
</Box> </Box>
<form onSubmit={handleSubmit(onclickFindPassword)}> <form onSubmit={handleSubmit(onclickFindPassword)}>
<FormControl mt={8} isInvalid={!!errors.username}> <FormControl mt={5} isInvalid={!!errors.username}>
<Input <Input
placeholder="邮箱/手机号" placeholder="邮箱/手机号"
size={mediaLgMd} size={mediaLgMd}
@@ -95,7 +95,7 @@ const RegisterForm = ({ setPageType, loginSuccess }: Props) => {
{!!errors.username && errors.username.message} {!!errors.username && errors.username.message}
</FormErrorMessage> </FormErrorMessage>
</FormControl> </FormControl>
<FormControl mt={8} isInvalid={!!errors.username}> <FormControl mt={5} isInvalid={!!errors.username}>
<Flex> <Flex>
<Input <Input
flex={1} flex={1}
@@ -121,7 +121,7 @@ const RegisterForm = ({ setPageType, loginSuccess }: Props) => {
{!!errors.code && errors.code.message} {!!errors.code && errors.code.message}
</FormErrorMessage> </FormErrorMessage>
</FormControl> </FormControl>
<FormControl mt={8} isInvalid={!!errors.password}> <FormControl mt={5} isInvalid={!!errors.password}>
<Input <Input
type={'password'} type={'password'}
placeholder="新密码" placeholder="新密码"
@@ -142,7 +142,7 @@ const RegisterForm = ({ setPageType, loginSuccess }: Props) => {
{!!errors.password && errors.password.message} {!!errors.password && errors.password.message}
</FormErrorMessage> </FormErrorMessage>
</FormControl> </FormControl>
<FormControl mt={8} isInvalid={!!errors.password2}> <FormControl mt={5} isInvalid={!!errors.password2}>
<Input <Input
type={'password'} type={'password'}
placeholder="确认密码" placeholder="确认密码"
@@ -168,7 +168,7 @@ const RegisterForm = ({ setPageType, loginSuccess }: Props) => {
</Box> </Box>
<Button <Button
type="submit" type="submit"
mt={8} mt={5}
w={'100%'} w={'100%'}
size={mediaLgMd} size={mediaLgMd}
colorScheme="blue" colorScheme="blue"

View File

@@ -86,7 +86,7 @@ const RegisterForm = ({ setPageType, loginSuccess }: Props) => {
FastGPT FastGPT
</Box> </Box>
<form onSubmit={handleSubmit(onclickRegister)}> <form onSubmit={handleSubmit(onclickRegister)}>
<FormControl mt={8} isInvalid={!!errors.username}> <FormControl mt={5} isInvalid={!!errors.username}>
<Input <Input
placeholder="邮箱/手机号" placeholder="邮箱/手机号"
size={mediaLgMd} size={mediaLgMd}
@@ -103,7 +103,7 @@ const RegisterForm = ({ setPageType, loginSuccess }: Props) => {
{!!errors.username && errors.username.message} {!!errors.username && errors.username.message}
</FormErrorMessage> </FormErrorMessage>
</FormControl> </FormControl>
<FormControl mt={8} isInvalid={!!errors.username}> <FormControl mt={5} isInvalid={!!errors.username}>
<Flex> <Flex>
<Input <Input
flex={1} flex={1}
@@ -129,7 +129,7 @@ const RegisterForm = ({ setPageType, loginSuccess }: Props) => {
{!!errors.code && errors.code.message} {!!errors.code && errors.code.message}
</FormErrorMessage> </FormErrorMessage>
</FormControl> </FormControl>
<FormControl mt={8} isInvalid={!!errors.password}> <FormControl mt={5} isInvalid={!!errors.password}>
<Input <Input
type={'password'} type={'password'}
placeholder="密码" placeholder="密码"
@@ -150,7 +150,7 @@ const RegisterForm = ({ setPageType, loginSuccess }: Props) => {
{!!errors.password && errors.password.message} {!!errors.password && errors.password.message}
</FormErrorMessage> </FormErrorMessage>
</FormControl> </FormControl>
<FormControl mt={8} isInvalid={!!errors.password2}> <FormControl mt={5} isInvalid={!!errors.password2}>
<Input <Input
type={'password'} type={'password'}
placeholder="确认密码" placeholder="确认密码"
@@ -176,7 +176,7 @@ const RegisterForm = ({ setPageType, loginSuccess }: Props) => {
</Box> </Box>
<Button <Button
type="submit" type="submit"
mt={8} mt={5}
w={'100%'} w={'100%'}
size={mediaLgMd} size={mediaLgMd}
colorScheme="blue" colorScheme="blue"

View File

@@ -45,12 +45,19 @@ const Login = () => {
}, [router]); }, [router]);
return ( return (
<Box className={styles.loginPage} h={'100%'} p={isPc ? '10vh 10vw' : 0}>
<Flex <Flex
maxW={'1240px'} alignItems={'center'}
m={'auto'} justifyContent={'center'}
backgroundColor={'#fff'} className={styles.loginPage}
h={'100%'}
px={[0, '10vw']}
>
<Flex
height="100%" height="100%"
w={'100%'}
maxW={'1240px'}
maxH={['auto', '660px']}
backgroundColor={'#fff'}
alignItems={'center'} alignItems={'center'}
justifyContent={'center'} justifyContent={'center'}
p={10} p={10}
@@ -83,7 +90,7 @@ const Login = () => {
<DynamicComponent type={pageType} /> <DynamicComponent type={pageType} />
</Box> </Box>
</Flex> </Flex>
</Box> </Flex>
); );
}; };

View File

@@ -162,15 +162,22 @@ const SelectFileModal = ({
return ( return (
<Modal isOpen={true} onClose={onClose} isCentered> <Modal isOpen={true} onClose={onClose} isCentered>
<ModalOverlay /> <ModalOverlay />
<ModalContent maxW={'min(1000px, 90vw)'} m={0} position={'relative'} h={'90vh'}> <ModalContent
display={'flex'}
maxW={'min(1000px, 90vw)'}
m={0}
position={'relative'}
h={'90vh'}
>
<ModalHeader></ModalHeader> <ModalHeader></ModalHeader>
<ModalCloseButton /> <ModalCloseButton />
<ModalBody <ModalBody
flex={1}
h={0}
display={'flex'} display={'flex'}
flexDirection={'column'} flexDirection={'column'}
p={0} p={0}
h={'100%'}
alignItems={'center'} alignItems={'center'}
justifyContent={'center'} justifyContent={'center'}
fontSize={'sm'} fontSize={'sm'}

View File

@@ -23,7 +23,7 @@ export const openaiError: Record<string, string> = {
context_length_exceeded: '内容超长了,请重置对话', context_length_exceeded: '内容超长了,请重置对话',
Unauthorized: 'API-KEY 不合法', Unauthorized: 'API-KEY 不合法',
rate_limit_reached: 'API被限制请稍后再试', rate_limit_reached: 'API被限制请稍后再试',
'Bad Request': 'Bad Request~ openai 异常', 'Bad Request': 'Bad Request~ 可能内容太多了',
'Bad Gateway': '网关异常,请重试' 'Bad Gateway': '网关异常,请重试'
}; };
export const openaiError2: Record<string, string> = { export const openaiError2: Record<string, string> = {

View File

@@ -41,10 +41,6 @@ const ChatSchema = new Schema({
value: { value: {
type: String, type: String,
required: true required: true
},
deleted: {
type: Boolean,
default: false
} }
} }
], ],

View File

@@ -4,7 +4,7 @@ import jwt from 'jsonwebtoken';
import { Chat, Model, OpenApi, User } from '../mongo'; import { Chat, Model, OpenApi, User } from '../mongo';
import type { ModelSchema } from '@/types/mongoSchema'; import type { ModelSchema } from '@/types/mongoSchema';
import { getOpenApiKey } from './openai'; import { getOpenApiKey } from './openai';
import type { ChatItemType } from '@/types/chat'; import type { ChatItemSimpleType } from '@/types/chat';
import mongoose from 'mongoose'; import mongoose from 'mongoose';
import { defaultModel } from '@/constants/model'; import { defaultModel } from '@/constants/model';
import { formatPrice } from '@/utils/user'; import { formatPrice } from '@/utils/user';
@@ -94,14 +94,20 @@ export const authChat = async ({
const { model } = await authModel({ modelId, userId, authOwner: false, reserveDetail: true }); const { model } = await authModel({ modelId, userId, authOwner: false, reserveDetail: true });
// 聊天内容 // 聊天内容
let content: ChatItemType[] = []; let content: ChatItemSimpleType[] = [];
if (chatId) { if (chatId) {
// 获取 chat 数据 // 获取 chat 数据
content = await Chat.aggregate([ content = await Chat.aggregate([
{ $match: { _id: new mongoose.Types.ObjectId(chatId) } }, { $match: { _id: new mongoose.Types.ObjectId(chatId) } },
{
$project: {
content: {
$slice: ['$content', -50] // 返回 content 数组的最后50个元素
}
}
},
{ $unwind: '$content' }, { $unwind: '$content' },
{ $match: { 'content.deleted': false } },
{ {
$project: { $project: {
obj: '$content.obj', obj: '$content.obj',
@@ -110,7 +116,6 @@ export const authChat = async ({
} }
]); ]);
} }
// 获取 user 的 apiKey // 获取 user 的 apiKey
const { userApiKey, systemKey } = await getOpenApiKey(userId); const { userApiKey, systemKey } = await getOpenApiKey(userId);

View File

@@ -1,6 +1,6 @@
import crypto from 'crypto'; import crypto from 'crypto';
import jwt from 'jsonwebtoken'; import jwt from 'jsonwebtoken';
import { ChatItemType } from '@/types/chat'; import { ChatItemSimpleType } from '@/types/chat';
import { countChatTokens } from '@/utils/tools'; import { countChatTokens } from '@/utils/tools';
import { ChatCompletionRequestMessageRoleEnum, ChatCompletionRequestMessage } from 'openai'; import { ChatCompletionRequestMessageRoleEnum, ChatCompletionRequestMessage } from 'openai';
import { ChatModelEnum } from '@/constants/model'; import { ChatModelEnum } from '@/constants/model';
@@ -45,7 +45,7 @@ export const openaiChatFilter = ({
maxTokens maxTokens
}: { }: {
model: `${ChatModelEnum}`; model: `${ChatModelEnum}`;
prompts: ChatItemType[]; prompts: ChatItemSimpleType[];
maxTokens: number; maxTokens: number;
}) => { }) => {
// role map // role map
@@ -94,7 +94,7 @@ export const openaiChatFilter = ({
/* 整体 tokens 超出范围 */ /* 整体 tokens 超出范围 */
if (tokens >= maxTokens) { if (tokens >= maxTokens) {
break; return systemPrompt ? [systemPrompt, ...chats.slice(1)] : chats.slice(1);
} }
} }

6
src/types/chat.d.ts vendored
View File

@@ -1,5 +1,7 @@
export type ChatItemType = { export type ChatItemSimpleType = {
obj: 'Human' | 'AI' | 'SYSTEM'; obj: 'Human' | 'AI' | 'SYSTEM';
value: string; value: string;
deleted?: boolean;
}; };
export type ChatItemType = {
_id: string;
} & ChatItemSimpleType;