perf: prompt and gpt4

This commit is contained in:
archer
2023-05-10 12:03:54 +08:00
parent e3c9b8179e
commit cdf4b9f324
12 changed files with 47 additions and 24 deletions

View File

@@ -1,5 +1,5 @@
import axios, { Method, InternalAxiosRequestConfig, AxiosResponse } from 'axios';
import { clearToken } from '@/utils/user';
import { clearCookie } from '@/utils/user';
import { TOKEN_ERROR_CODE } from '@/service/errorCode';
interface ConfigType {
@@ -58,7 +58,7 @@ function responseError(err: any) {
// 有报错响应
const res = err.response;
if (res.data.code in TOKEN_ERROR_CODE) {
clearToken();
clearCookie();
return Promise.reject({ message: 'token过期重新登录' });
}
}

View File

@@ -64,6 +64,8 @@ export const postLogin = ({ username, password }: { username: string; password:
password: createHashPassword(password)
});
export const loginOut = () => GET('/user/loginout');
export const putUserInfo = (data: UserUpdateParams) => PUT('/user/update', data);
export const getUserBills = (data: RequestPaging) =>

View File

@@ -29,7 +29,7 @@ export const ChatModelMap = {
name: 'ChatGpt',
contextMaxToken: 4096,
systemMaxToken: 2500,
maxTemperature: 1.5,
maxTemperature: 1.2,
price: 3
},
[OpenAiChatEnum.GPT4]: {
@@ -37,16 +37,16 @@ export const ChatModelMap = {
name: 'Gpt4',
contextMaxToken: 8000,
systemMaxToken: 3500,
maxTemperature: 1.5,
price: 30
maxTemperature: 1.2,
price: 50
},
[OpenAiChatEnum.GPT432k]: {
chatModel: OpenAiChatEnum.GPT432k,
name: 'Gpt4-32k',
contextMaxToken: 32000,
systemMaxToken: 6000,
maxTemperature: 1.5,
price: 30
maxTemperature: 1.2,
price: 90
},
[ClaudeEnum.Claude]: {
chatModel: ClaudeEnum.Claude,
@@ -60,6 +60,7 @@ export const ChatModelMap = {
export const chatModelList: ChatModelItemType[] = [
ChatModelMap[OpenAiChatEnum.GPT35],
ChatModelMap[OpenAiChatEnum.GPT4],
ChatModelMap[ClaudeEnum.Claude]
];

View File

@@ -68,11 +68,11 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
return res.send(searchPrompts[0]?.value);
}
prompts.splice(prompts.length - 1, 0, ...searchPrompts);
prompts.splice(prompts.length - 3, 0, ...searchPrompts);
} else {
// 没有用知识库搜索,仅用系统提示词
model.chat.systemPrompt &&
prompts.splice(prompts.length - 1, 0, {
prompts.splice(prompts.length - 3, 0, {
obj: ChatRoleEnum.System,
value: model.chat.systemPrompt
});

View File

@@ -0,0 +1,16 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { clearCookie } from '@/service/utils/tools';
/**
 * Logout API route.
 *
 * Clears the auth cookie on the response via `clearCookie(res)` and replies
 * with the standard JSON envelope produced by `jsonRes`. Any synchronous
 * failure is reported back as a 500 in the same envelope.
 */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
  try {
    // Drop the auth cookie before sending the success envelope.
    clearCookie(res);
    jsonRes(res);
  } catch (error) {
    // Report the failure through the shared response helper.
    jsonRes(res, { code: 500, error });
  }
}

View File

@@ -7,7 +7,7 @@ import { useToast } from '@/hooks/useToast';
import { useGlobalStore } from '@/store/global';
import { useUserStore } from '@/store/user';
import { UserType } from '@/types/user';
import { clearToken } from '@/utils/user';
import { clearCookie } from '@/utils/user';
import { useRouter } from 'next/router';
import { useQuery } from '@tanstack/react-query';
import dynamic from 'next/dynamic';
@@ -75,7 +75,7 @@ const NumberSetting = () => {
);
const onclickLogOut = useCallback(() => {
clearToken();
clearCookie();
setUserInfo(null);
router.replace('/login');
}, [router, setUserInfo]);

View File

@@ -81,7 +81,7 @@ const OpenApi = () => {
mr={4}
variant={'outline'}
onClick={() => {
copyData(`${location.origin}?inviterId=${userInfo?._id}`, '已复制邀请链接');
copyData(`${location.origin}/?inviterId=${userInfo?._id}`, '已复制邀请链接');
}}
>

View File

@@ -105,11 +105,11 @@ export const searchKb = async ({
: [
{
obj: ChatRoleEnum.System,
value: `我们玩问答游戏,规则为:
value: `我们玩问答游戏,规则为:
1.你完全忘记你已有的知识
2.你只能回答关于"${model.name}"的问题
3.你只能从知识库中选择内容进行回答
4.如果问题不在知识库中,你回答"我不知道。"
4.如果问题不在知识库中,你必须回答:我不知道。
务必遵守规则`
}
])
@@ -161,7 +161,7 @@ export const searchKb = async ({
searchPrompts: [
{
obj: ChatRoleEnum.System,
value: `知识库:'${filterSystemPrompt}'`
value: `知识库:${filterSystemPrompt}`
},
...fixedPrompts
]

View File

@@ -55,11 +55,11 @@ export const getApiKey = async ({
},
[OpenAiChatEnum.GPT4]: {
userOpenAiKey: user.openaiKey || '',
systemAuthKey: process.env.OPENAIKEY as string
systemAuthKey: process.env.GPT4KEY as string
},
[OpenAiChatEnum.GPT432k]: {
userOpenAiKey: user.openaiKey || '',
systemAuthKey: process.env.OPENAIKEY as string
systemAuthKey: process.env.GPT4KEY as string
},
[ClaudeEnum.Claude]: {
userOpenAiKey: '',

View File

@@ -25,9 +25,9 @@ export const lafClaudChat = async ({
.filter((item) => item.obj === 'System')
.map((item) => item.value)
.join('\n');
const systemPromptText = systemPrompt ? `\n知识库内容:'${systemPrompt}'\n` : '';
const systemPromptText = systemPrompt ? `你本次知识:${systemPrompt}\n` : '';
const prompt = `${systemPromptText}\n我的问题是:'${messages[messages.length - 1].value}'`;
const prompt = `${systemPromptText}我的问题是:'${messages[messages.length - 1].value}'`;
const lafResponse = await axios.post(
'https://hnvacz.laf.run/claude-gpt',

View File

@@ -118,8 +118,8 @@ export const ChatContextFilter = ({
messages: chats
});
/* 整体 tokens 超出范围 */
if (tokens >= maxTokens) {
/* 整体 tokens 超出范围, system必须保留 */
if (tokens >= maxTokens && formatPrompts[i].obj !== ChatRoleEnum.System) {
return chats.slice(1);
}
}

View File

@@ -1,8 +1,12 @@
import { PRICE_SCALE } from '@/constants/common';
const tokenKey = 'fast-gpt-token';
import { loginOut } from '@/api/user';
export const clearToken = () => {
document.cookie = 'token=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/;';
/**
 * Log the user out server-side so the auth cookie is invalidated.
 *
 * Best-effort: callers (e.g. the account page's logout handler) reset local
 * state and redirect regardless of whether the request succeeds, so failures
 * are deliberately ignored rather than surfaced.
 */
export const clearCookie = () => {
  try {
    // loginOut() performs an HTTP GET and returns a promise; a try/catch
    // alone would NOT catch its rejection. Attach a catch so a failed
    // request does not become an unhandled promise rejection.
    Promise.resolve(loginOut()).catch(() => undefined);
  } catch {
    // Ignore synchronous failures too — logout is best-effort.
  }
};
/**