This commit is contained in:
archer
2023-05-10 19:59:19 +08:00
44 changed files with 718 additions and 368 deletions

View File

@@ -1,5 +1,5 @@
import axios, { Method, InternalAxiosRequestConfig, AxiosResponse } from 'axios';
import { clearToken } from '@/utils/user';
import { clearCookie } from '@/utils/user';
import { TOKEN_ERROR_CODE } from '@/service/errorCode';
interface ConfigType {
@@ -58,7 +58,7 @@ function responseError(err: any) {
// 有报错响应
const res = err.response;
if (res.data.code in TOKEN_ERROR_CODE) {
clearToken();
clearCookie();
return Promise.reject({ message: 'token过期重新登录' });
}
}

View File

@@ -64,6 +64,8 @@ export const postLogin = ({ username, password }: { username: string; password:
password: createHashPassword(password)
});
export const loginOut = () => GET('/user/loginout');
export const putUserInfo = (data: UserUpdateParams) => PUT('/user/update', data);
export const getUserBills = (data: RequestPaging) =>

View File

@@ -35,7 +35,7 @@ const Layout = ({ children, isPcDevice }: { children: JSX.Element; isPcDevice: b
return (
<>
<Box h={'100%'} overflowY={'auto'} bg={'gray.100'}>
<Box h={'100%'} bg={'gray.100'}>
{isPc ? (
pcUnShowLayoutRoute[router.pathname] ? (
<Auth>{children}</Auth>

View File

@@ -102,9 +102,7 @@ const Navbar = () => {
justifyContent={'center'}
onClick={() => {
if (item.link === router.asPath) return;
router.push(item.link, undefined, {
shallow: true
});
router.push(item.link);
}}
cursor={'pointer'}
w={'60px'}

View File

@@ -28,31 +28,31 @@ export const ChatModelMap = {
chatModel: OpenAiChatEnum.GPT35,
name: 'ChatGpt',
contextMaxToken: 4096,
systemMaxToken: 2500,
maxTemperature: 1.5,
systemMaxToken: 2400,
maxTemperature: 1.2,
price: 3
},
[OpenAiChatEnum.GPT4]: {
chatModel: OpenAiChatEnum.GPT4,
name: 'Gpt4',
contextMaxToken: 8000,
systemMaxToken: 3500,
maxTemperature: 1.5,
price: 30
systemMaxToken: 3000,
maxTemperature: 1.2,
price: 50
},
[OpenAiChatEnum.GPT432k]: {
chatModel: OpenAiChatEnum.GPT432k,
name: 'Gpt4-32k',
contextMaxToken: 32000,
systemMaxToken: 6000,
maxTemperature: 1.5,
price: 30
systemMaxToken: 3000,
maxTemperature: 1.2,
price: 90
},
[ClaudeEnum.Claude]: {
chatModel: ClaudeEnum.Claude,
name: 'Claude(免费体验)',
contextMaxToken: 9000,
systemMaxToken: 2500,
systemMaxToken: 2400,
maxTemperature: 1,
price: 0
}
@@ -60,6 +60,7 @@ export const ChatModelMap = {
export const chatModelList: ChatModelItemType[] = [
ChatModelMap[OpenAiChatEnum.GPT35],
ChatModelMap[OpenAiChatEnum.GPT4],
ChatModelMap[ClaudeEnum.Claude]
];

View File

@@ -68,11 +68,11 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
return res.send(searchPrompts[0]?.value);
}
prompts.splice(prompts.length - 1, 0, ...searchPrompts);
prompts.splice(prompts.length - 3, 0, ...searchPrompts);
} else {
// 没有用知识库搜索,仅用系统提示词
model.chat.systemPrompt &&
prompts.unshift({
prompts.splice(prompts.length - 3, 0, {
obj: ChatRoleEnum.System,
value: model.chat.systemPrompt
});

View File

@@ -87,12 +87,11 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
prompts.splice(prompts.length - 1, 0, ...searchPrompts);
} else {
// 没有用知识库搜索,仅用系统提示词
if (model.chat.systemPrompt) {
prompts.unshift({
model.chat.systemPrompt &&
prompts.splice(prompts.length - 1, 0, {
obj: ChatRoleEnum.System,
value: model.chat.systemPrompt
});
}
}
// 计算温度

View File

@@ -0,0 +1,16 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { clearCookie } from '@/service/utils/tools';
// Logout API route: clears the auth cookie on the response, then returns a success payload.
// NOTE(review): `clearCookie` and `jsonRes` are project helpers (@/service/utils/tools,
// @/service/response) whose implementations are not visible here — presumably clearCookie
// emits an expired Set-Cookie header and jsonRes(res) writes a default success envelope; verify.
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
// Invalidate the session cookie on this response.
clearCookie(res);
// Default success response (no data payload).
jsonRes(res);
} catch (err) {
// Any unexpected failure is surfaced as a 500 with the raw error attached.
jsonRes(res, {
code: 500,
error: err
});
}
}

View File

@@ -198,7 +198,6 @@ const Chat = ({
if (newChatId) {
setForbidLoadChatData(true);
router.replace(`/chat?modelId=${modelId}&chatId=${newChatId}`);
loadHistory({ pageNum: 1, init: true });
}
} catch (err) {
toast({
@@ -222,6 +221,9 @@ const Chat = ({
};
})
}));
// refresh history
loadHistory({ pageNum: 1, init: true });
},
[
chatId,
@@ -488,10 +490,6 @@ const Chat = ({
modelId && setLastChatModelId(modelId);
setLastChatId(chatId);
/* get mode and chat into ↓ */
// phone: history page
if (!isPc && Object.keys(router.query).length === 0) return null;
if (forbidLoadChatData) {
setForbidLoadChatData(false);
return null;
@@ -566,7 +564,7 @@ const Chat = ({
try {
setIsLoading(true);
await onclickDelHistory(chatData.chatId);
router.replace(`/chat`);
router.replace(`/chat?modelId=${modelId}`);
} catch (err) {
console.log(err);
}

View File

@@ -7,7 +7,7 @@ import { useToast } from '@/hooks/useToast';
import { useGlobalStore } from '@/store/global';
import { useUserStore } from '@/store/user';
import { UserType } from '@/types/user';
import { clearToken } from '@/utils/user';
import { clearCookie } from '@/utils/user';
import { useRouter } from 'next/router';
import { useQuery } from '@tanstack/react-query';
import dynamic from 'next/dynamic';
@@ -75,7 +75,7 @@ const NumberSetting = () => {
);
const onclickLogOut = useCallback(() => {
clearToken();
clearCookie();
setUserInfo(null);
router.replace('/login');
}, [router, setUserInfo]);

View File

@@ -81,7 +81,7 @@ const OpenApi = () => {
mr={4}
variant={'outline'}
onClick={() => {
copyData(`${location.origin}?inviterId=${userInfo?._id}`, '已复制邀请链接');
copyData(`${location.origin}/?inviterId=${userInfo?._id}`, '已复制邀请链接');
}}
>

View File

@@ -105,11 +105,12 @@ export const searchKb = async ({
: [
{
obj: ChatRoleEnum.System,
value: `我们来玩问答游戏,规则为:
1.你只能回答关于"${model.name}"的问题
2.你只能从知识库中选择内容进行回答
3.如果问题不在知识库中,你会回答"我不知道。"
务必遵守规则`
value: `一个问答游戏,规则为:
1.你完全忘记你已有的知识
2.你只回答关于"${model.name}"的问题
3.你只从知识库中选择内容进行回答
4.如果问题不在知识库中,你会回答:"我不知道。"
请务必遵守规则`
}
])
];
@@ -125,7 +126,8 @@ export const searchKb = async ({
length: Math.floor(maxTokens * rate)
})
)
.join('\n');
.join('\n')
.trim();
/* 高相似度+不回复 */
if (!filterSystemPrompt && model.chat.searchMode === ModelVectorSearchModeEnum.hightSimilarity) {
@@ -160,7 +162,7 @@ export const searchKb = async ({
searchPrompts: [
{
obj: ChatRoleEnum.System,
value: `知识库:'${filterSystemPrompt}'`
value: `知识库:${filterSystemPrompt}`
},
...fixedPrompts
]

View File

@@ -55,11 +55,11 @@ export const getApiKey = async ({
},
[OpenAiChatEnum.GPT4]: {
userOpenAiKey: user.openaiKey || '',
systemAuthKey: process.env.OPENAIKEY as string
systemAuthKey: process.env.GPT4KEY as string
},
[OpenAiChatEnum.GPT432k]: {
userOpenAiKey: user.openaiKey || '',
systemAuthKey: process.env.OPENAIKEY as string
systemAuthKey: process.env.GPT4KEY as string
},
[ClaudeEnum.Claude]: {
userOpenAiKey: '',

View File

@@ -25,9 +25,9 @@ export const lafClaudChat = async ({
.filter((item) => item.obj === 'System')
.map((item) => item.value)
.join('\n');
const systemPromptText = systemPrompt ? `\n知识库内容:'${systemPrompt}'\n` : '';
const systemPromptText = systemPrompt ? `你本次知识:${systemPrompt}\n` : '';
const prompt = `${systemPromptText}\n我的问题是:'${messages[messages.length - 1].value}'`;
const prompt = `${systemPromptText}我的问题是:'${messages[messages.length - 1].value}'`;
const lafResponse = await axios.post(
'https://hnvacz.laf.run/claude-gpt',

View File

@@ -109,35 +109,22 @@ export const ChatContextFilter = ({
// 根据 tokens 截断内容
const chats: ChatItemSimpleType[] = [];
let systemPrompt: ChatItemSimpleType | null = null;
// System 词保留
if (formatPrompts[0].obj === ChatRoleEnum.System) {
const prompt = formatPrompts.shift();
if (prompt) {
systemPrompt = prompt;
}
}
let messages: ChatItemSimpleType[] = [];
// 从后往前截取对话内容
for (let i = formatPrompts.length - 1; i >= 0; i--) {
chats.unshift(formatPrompts[i]);
messages = systemPrompt ? [systemPrompt, ...chats] : chats;
const tokens = modelToolMap[model].countTokens({
messages
messages: chats
});
/* 整体 tokens 超出范围 */
if (tokens >= maxTokens) {
return systemPrompt ? [systemPrompt, ...chats.slice(1)] : chats.slice(1);
/* 整体 tokens 超出范围, system必须保留 */
if (tokens >= maxTokens && formatPrompts[i].obj !== ChatRoleEnum.System) {
return chats.slice(1);
}
}
return messages;
return chats;
};
/* stream response */

View File

@@ -1,8 +1,12 @@
import { PRICE_SCALE } from '@/constants/common';
const tokenKey = 'fast-gpt-token';
import { loginOut } from '@/api/user';
export const clearToken = () => {
document.cookie = 'token=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/;';
export const clearCookie = () => {
try {
loginOut();
} catch (error) {
error;
}
};
/**