feat: openapi v2 chat

This commit is contained in:
archer
2023-06-18 19:59:37 +08:00
parent 2b93ae2d00
commit ee9afa310a
27 changed files with 790 additions and 585 deletions

View File

@@ -42,7 +42,7 @@
"nextjs-cors": "^2.1.2", "nextjs-cors": "^2.1.2",
"nodemailer": "^6.9.1", "nodemailer": "^6.9.1",
"nprogress": "^0.2.0", "nprogress": "^0.2.0",
"openai": "^3.2.1", "openai": "^3.3.0",
"papaparse": "^5.4.1", "papaparse": "^5.4.1",
"pg": "^8.10.0", "pg": "^8.10.0",
"react": "18.2.0", "react": "18.2.0",

10
client/pnpm-lock.yaml generated
View File

@@ -105,8 +105,8 @@ dependencies:
specifier: ^0.2.0 specifier: ^0.2.0
version: registry.npmmirror.com/nprogress@0.2.0 version: registry.npmmirror.com/nprogress@0.2.0
openai: openai:
specifier: ^3.2.1 specifier: ^3.3.0
version: registry.npmmirror.com/openai@3.2.1 version: registry.npmmirror.com/openai@3.3.0
papaparse: papaparse:
specifier: ^5.4.1 specifier: ^5.4.1
version: registry.npmmirror.com/papaparse@5.4.1 version: registry.npmmirror.com/papaparse@5.4.1
@@ -10249,10 +10249,10 @@ packages:
is-wsl: registry.npmmirror.com/is-wsl@2.2.0 is-wsl: registry.npmmirror.com/is-wsl@2.2.0
dev: true dev: true
registry.npmmirror.com/openai@3.2.1: registry.npmmirror.com/openai@3.3.0:
resolution: {integrity: sha512-762C9BNlJPbjjlWZi4WYK9iM2tAVAv0uUp1UmI34vb0CN5T2mjB/qM6RYBmNKMh/dN9fC+bxqPwWJZUTWW052A==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/openai/-/openai-3.2.1.tgz} resolution: {integrity: sha512-uqxI/Au+aPRnsaQRe8CojU0eCR7I0mBiKjD3sNMzY6DaC1ZVrc85u98mtJW6voDug8fgGN+DIZmTDxTthxb7dQ==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/openai/-/openai-3.3.0.tgz}
name: openai name: openai
version: 3.2.1 version: 3.3.0
dependencies: dependencies:
axios: registry.npmmirror.com/axios@0.26.1 axios: registry.npmmirror.com/axios@0.26.1
form-data: registry.npmmirror.com/form-data@4.0.0 form-data: registry.npmmirror.com/form-data@4.0.0

View File

@@ -1,67 +1,104 @@
import { GUIDE_PROMPT_HEADER, NEW_CHATID_HEADER, QUOTE_LEN_HEADER } from '@/constants/chat'; import { Props, ChatResponseType } from '@/pages/api/openapi/v1/chat/completions';
import { sseResponseEventEnum } from '@/constants/chat';
import { getErrText } from '@/utils/tools';
interface StreamFetchProps { interface StreamFetchProps {
url: string; data: Props;
data: any;
onMessage: (text: string) => void; onMessage: (text: string) => void;
abortSignal: AbortController; abortSignal: AbortController;
} }
export const streamFetch = ({ url, data, onMessage, abortSignal }: StreamFetchProps) => export const streamFetch = ({ data, onMessage, abortSignal }: StreamFetchProps) =>
new Promise<{ new Promise<ChatResponseType & { responseText: string }>(async (resolve, reject) => {
responseText: string;
newChatId: string;
systemPrompt: string;
quoteLen: number;
}>(async (resolve, reject) => {
try { try {
const res = await fetch(url, { const response = await window.fetch('/api/openapi/v1/chat/completions', {
method: 'POST', method: 'POST',
headers: { headers: {
'Content-Type': 'application/json' 'Content-Type': 'application/json'
}, },
body: JSON.stringify(data), signal: abortSignal.signal,
signal: abortSignal.signal body: JSON.stringify({
...data,
stream: true
})
}); });
const reader = res.body?.getReader();
if (!reader) return;
const decoder = new TextDecoder(); if (!response?.body) {
throw new Error('Request Error');
}
const newChatId = decodeURIComponent(res.headers.get(NEW_CHATID_HEADER) || ''); const reader = response.body?.getReader();
const systemPrompt = decodeURIComponent(res.headers.get(GUIDE_PROMPT_HEADER) || '').trim(); const decoder = new TextDecoder('utf-8');
const quoteLen = res.headers.get(QUOTE_LEN_HEADER)
? Number(res.headers.get(QUOTE_LEN_HEADER))
: 0;
// response data
let responseText = ''; let responseText = '';
let newChatId = '';
let quoteLen = 0;
const read = async () => { const read = async () => {
try { try {
const { done, value } = await reader?.read(); const { done, value } = await reader.read();
if (done) { if (done) {
if (res.status === 200) { if (response.status === 200) {
resolve({ responseText, newChatId, quoteLen, systemPrompt }); return resolve({
responseText,
newChatId,
quoteLen
});
} else { } else {
const parseError = JSON.parse(responseText); return reject('响应过程出现异常~');
reject(parseError?.message || '请求异常');
} }
return;
} }
const text = decoder.decode(value); const chunk = decoder.decode(value);
responseText += text; const chunkLines = chunk.split('\n\n').filter((item) => item);
onMessage(text); const chunkResponse = chunkLines.map((item) => {
const splitEvent = item.split('\n');
if (splitEvent.length === 2) {
return {
event: splitEvent[0].replace('event: ', ''),
data: splitEvent[1].replace('data: ', '')
};
}
return {
event: '',
data: splitEvent[0].replace('data: ', '')
};
});
chunkResponse.forEach((item) => {
// parse json data
const data = (() => {
try {
return JSON.parse(item.data);
} catch (error) {
return item.data;
}
})();
if (item.event === sseResponseEventEnum.answer && data !== '[DONE]') {
const answer: string = data?.choices[0].delta.content || '';
onMessage(answer);
responseText += answer;
} else if (item.event === sseResponseEventEnum.chatResponse) {
const chatResponse = data as ChatResponseType;
newChatId = chatResponse.newChatId;
quoteLen = chatResponse.quoteLen || 0;
}
});
read(); read();
} catch (err: any) { } catch (err: any) {
if (err?.message === 'The user aborted a request.') { if (err?.message === 'The user aborted a request.') {
return resolve({ responseText, newChatId, quoteLen, systemPrompt }); return resolve({
responseText,
newChatId,
quoteLen
});
} }
reject(typeof err === 'string' ? err : err?.message || '请求异常'); reject(getErrText(err, '请求异常'));
} }
}; };
read(); read();
} catch (err: any) { } catch (err: any) {
console.log(err, '===='); console.log(err, '====');
reject(typeof err === 'string' ? err : err?.message || '请求异常'); reject(getErrText(err, '请求异常'));
} }
}); });

View File

@@ -4,6 +4,7 @@ import type { ChatItemType } from '@/types/chat';
export interface InitChatResponse { export interface InitChatResponse {
chatId: string; chatId: string;
modelId: string; modelId: string;
systemPrompt?: string;
model: { model: {
name: string; name: string;
avatar: string; avatar: string;

View File

@@ -1,6 +1,7 @@
export const NEW_CHATID_HEADER = 'response-new-chat-id'; export enum sseResponseEventEnum {
export const QUOTE_LEN_HEADER = 'response-quote-len'; answer = 'answer',
export const GUIDE_PROMPT_HEADER = 'response-guide-prompt'; chatResponse = 'chatResponse'
}
export enum ChatRoleEnum { export enum ChatRoleEnum {
System = 'System', System = 'System',

View File

@@ -12,11 +12,8 @@ export enum OpenAiChatEnum {
'GPT4' = 'gpt-4', 'GPT4' = 'gpt-4',
'GPT432k' = 'gpt-4-32k' 'GPT432k' = 'gpt-4-32k'
} }
export enum ClaudeEnum {
'Claude' = 'Claude'
}
export type ChatModelType = `${OpenAiChatEnum}` | `${ClaudeEnum}`; export type ChatModelType = `${OpenAiChatEnum}`;
export type ChatModelItemType = { export type ChatModelItemType = {
chatModel: ChatModelType; chatModel: ChatModelType;
@@ -59,14 +56,6 @@ export const ChatModelMap = {
systemMaxToken: 8000, systemMaxToken: 8000,
maxTemperature: 1.2, maxTemperature: 1.2,
price: 90 price: 90
},
[ClaudeEnum.Claude]: {
chatModel: ClaudeEnum.Claude,
name: 'Claude(免费体验)',
contextMaxToken: 9000,
systemMaxToken: 2700,
maxTemperature: 1,
price: 0
} }
}; };

View File

@@ -1,192 +0,0 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase } from '@/service/mongo';
import { authChat } from '@/service/utils/auth';
import { modelServiceToolMap } from '@/service/utils/chat';
import { ChatItemType } from '@/types/chat';
import { jsonRes } from '@/service/response';
import { ChatModelMap } from '@/constants/model';
import { pushChatBill } from '@/service/events/pushBill';
import { resStreamResponse } from '@/service/utils/chat';
import { appKbSearch } from '../openapi/kb/appKbSearch';
import { ChatRoleEnum, QUOTE_LEN_HEADER, GUIDE_PROMPT_HEADER } from '@/constants/chat';
import { BillTypeEnum } from '@/constants/user';
import { sensitiveCheck } from '../openapi/text/sensitiveCheck';
import { NEW_CHATID_HEADER } from '@/constants/chat';
import { saveChat } from './saveChat';
import { Types } from 'mongoose';
/* Send prompt: chat completion request handler */
/**
 * Legacy chat completion endpoint (superseded by /api/openapi/v1/chat/completions).
 *
 * Accepts a [user question, AI answer placeholder] prompt pair plus a modelId
 * and optional chatId. Optionally runs knowledge-base search / system-prompt
 * injection, streams the model answer to the client, persists the exchange,
 * and bills usage when the platform API key was used.
 *
 * Responds with a raw stream on success, or a 500 JSON error envelope.
 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  // Terminate the response if the client disconnects or the socket errors,
  // so the stream below does not keep writing to a dead connection.
  res.on('close', () => {
    res.end();
  });
  res.on('error', () => {
    console.log('error: ', 'request error');
    res.end();
  });

  try {
    const { chatId, prompt, modelId } = req.body as {
      prompt: [ChatItemType, ChatItemType]; // [question, answer placeholder]
      modelId: string;
      chatId?: string; // absent -> a new conversation id is generated below
    };

    // prompt must be exactly the [question, answer] pair
    if (!modelId || !prompt || prompt.length !== 2) {
      throw new Error('Chat 缺少参数');
    }

    await connectToDatabase();
    let startTime = Date.now();

    // Authenticate and load model config, prior chat content and API keys.
    // showModelDetail: whether this caller may see prompt/quote internals.
    const { model, showModelDetail, content, userOpenAiKey, systemAuthKey, userId } =
      await authChat({
        modelId,
        chatId,
        req
      });

    const modelConstantsData = ChatModelMap[model.chat.chatModel];

    // Build optional context: KB quotes and/or a system prompt.
    const {
      rawSearch = [],
      userSystemPrompt = [],
      quotePrompt = []
    } = await (async () => {
      // Knowledge-base search is configured for this model
      if (model.chat.relatedKbs?.length > 0) {
        const { rawSearch, userSystemPrompt, quotePrompt } = await appKbSearch({
          model,
          userId,
          // carry over quotes pinned on the latest history item
          fixedQuote: content[content.length - 1]?.quote || [],
          prompt: prompt[0],
          similarity: model.chat.searchSimilarity,
          limit: model.chat.searchLimit
        });
        return {
          rawSearch: rawSearch,
          userSystemPrompt: userSystemPrompt ? [userSystemPrompt] : [],
          quotePrompt: [quotePrompt]
        };
      }
      // No KB: fall back to the model's static system prompt, if any
      if (model.chat.systemPrompt) {
        return {
          userSystemPrompt: [
            {
              obj: ChatRoleEnum.System,
              value: model.chat.systemPrompt
            }
          ]
        };
      }
      return {};
    })();

    // get conversationId. create a newId if it is null
    const conversationId = chatId || String(new Types.ObjectId());
    // New conversation: tell the client its id via a response header
    !chatId && res.setHeader(NEW_CHATID_HEADER, conversationId);

    // Only expose prompt/quote details to callers allowed to see them
    if (showModelDetail) {
      userSystemPrompt[0] &&
        res.setHeader(GUIDE_PROMPT_HEADER, encodeURIComponent(userSystemPrompt[0].value));
      res.setHeader(QUOTE_LEN_HEADER, rawSearch.length);
    }

    // search result is empty: answer with the configured fallback text,
    // persist the exchange, and skip the model call entirely
    if (model.chat.relatedKbs?.length > 0 && !quotePrompt[0]?.value && model.chat.searchEmptyText) {
      const response = model.chat.searchEmptyText;
      await saveChat({
        chatId,
        newChatId: conversationId,
        modelId,
        prompts: [
          prompt[0],
          {
            ...prompt[1],
            quote: [],
            value: response
          }
        ],
        userId
      });
      return res.end(response);
    }

    // Assemble the full message list: quotes + history + system prompt + question
    const prompts = [...quotePrompt, ...content, ...userSystemPrompt, prompt[0]];

    // content check
    await sensitiveCheck({
      input: [...quotePrompt, ...userSystemPrompt, prompt[0]].map((item) => item.value).join('')
    });

    // Map the user-configured temperature (1-10 scale) onto the model's max temperature
    const temperature = (modelConstantsData.maxTemperature * (model.chat.temperature / 10)).toFixed(
      2
    );

    // Dispatch the chat request (streaming); prefer the user's own OpenAI key
    const { streamResponse, responseMessages } = await modelServiceToolMap[
      model.chat.chatModel
    ].chatCompletion({
      apiKey: userOpenAiKey || systemAuthKey,
      temperature: +temperature,
      messages: prompts,
      stream: true,
      res,
      chatId: conversationId
    });

    console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);

    // Client already gone: nothing left to stream
    if (res.closed) return res.end();

    try {
      // Pipe the model stream to the client and collect the final answer/tokens
      const { totalTokens, finishMessages, responseContent } = await resStreamResponse({
        model: model.chat.chatModel,
        res,
        chatResponse: streamResponse,
        prompts: responseMessages
      });

      // save chat
      await saveChat({
        chatId,
        newChatId: conversationId,
        modelId,
        prompts: [
          prompt[0],
          {
            ...prompt[1],
            value: responseContent,
            // only owners get quote/system-prompt details persisted on the record
            quote: showModelDetail ? rawSearch : [],
            systemPrompt: showModelDetail ? userSystemPrompt[0]?.value : ''
          }
        ],
        userId
      });

      res.end();

      // Bill only when the platform key was used (not the user's own key)
      pushChatBill({
        isPay: !userOpenAiKey,
        chatModel: model.chat.chatModel,
        userId,
        chatId: conversationId,
        textLen: finishMessages.map((item) => item.value).join('').length,
        tokens: totalTokens,
        type: BillTypeEnum.chat
      });
    } catch (error) {
      // Stream failed mid-way: close the response; the error is logged only
      res.end();
      console.log('error结束', error);
    }
  } catch (err: any) {
    res.status(500);
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
}

View File

@@ -20,31 +20,32 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
await connectToDatabase(); await connectToDatabase();
let model: ModelSchema;
// 没有 modelId 时直接获取用户的第一个id // 没有 modelId 时直接获取用户的第一个id
if (!modelId) { const model = await (async () => {
const myModel = await Model.findOne({ userId }); if (!modelId) {
if (!myModel) { const myModel = await Model.findOne({ userId });
const { _id } = await Model.create({ if (!myModel) {
name: '应用1', const { _id } = await Model.create({
userId name: '应用1',
}); userId
model = (await Model.findById(_id)) as ModelSchema; });
return (await Model.findById(_id)) as ModelSchema;
} else {
return myModel;
}
} else { } else {
model = myModel; // 校验使用权限
const authRes = await authModel({
modelId,
userId,
authUser: false,
authOwner: false
});
return authRes.model;
} }
modelId = model._id; })();
} else {
// 校验使用权限 modelId = modelId || model._id;
const authRes = await authModel({
modelId,
userId,
authUser: false,
authOwner: false
});
model = authRes.model;
}
// 历史记录 // 历史记录
let history: ChatItemType[] = []; let history: ChatItemType[] = [];
@@ -86,6 +87,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
]); ]);
} }
const isOwner = String(model.userId) === userId;
jsonRes<InitChatResponse>(res, { jsonRes<InitChatResponse>(res, {
data: { data: {
chatId: chatId || '', chatId: chatId || '',
@@ -94,9 +97,10 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
name: model.name, name: model.name,
avatar: model.avatar, avatar: model.avatar,
intro: model.intro, intro: model.intro,
canUse: model.share.isShare || String(model.userId) === userId canUse: model.share.isShare || isOwner
}, },
chatModel: model.chat.chatModel, chatModel: model.chat.chatModel,
systemPrompt: isOwner ? model.chat.systemPrompt : '',
history history
} }
}); });

View File

@@ -4,10 +4,9 @@ import { ChatItemType } from '@/types/chat';
import { connectToDatabase, Chat, Model } from '@/service/mongo'; import { connectToDatabase, Chat, Model } from '@/service/mongo';
import { authModel } from '@/service/utils/auth'; import { authModel } from '@/service/utils/auth';
import { authUser } from '@/service/utils/auth'; import { authUser } from '@/service/utils/auth';
import mongoose from 'mongoose'; import { Types } from 'mongoose';
type Props = { type Props = {
newChatId?: string;
chatId?: string; chatId?: string;
modelId: string; modelId: string;
prompts: [ChatItemType, ChatItemType]; prompts: [ChatItemType, ChatItemType];
@@ -16,7 +15,7 @@ type Props = {
/* 聊天内容存存储 */ /* 聊天内容存存储 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) { export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try { try {
const { chatId, modelId, prompts, newChatId } = req.body as Props; const { chatId, modelId, prompts } = req.body as Props;
if (!prompts) { if (!prompts) {
throw new Error('缺少参数'); throw new Error('缺少参数');
@@ -24,16 +23,15 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const { userId } = await authUser({ req, authToken: true }); const { userId } = await authUser({ req, authToken: true });
const nId = await saveChat({ const response = await saveChat({
chatId, chatId,
modelId, modelId,
prompts, prompts,
newChatId,
userId userId
}); });
jsonRes(res, { jsonRes(res, {
data: nId data: response
}); });
} catch (err) { } catch (err) {
jsonRes(res, { jsonRes(res, {
@@ -44,58 +42,54 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
} }
export async function saveChat({ export async function saveChat({
chatId,
newChatId, newChatId,
chatId,
modelId, modelId,
prompts, prompts,
userId userId
}: Props & { userId: string }) { }: Props & { newChatId?: Types.ObjectId; userId: string }) {
await connectToDatabase(); await connectToDatabase();
const { model } = await authModel({ modelId, userId, authOwner: false }); const { model } = await authModel({ modelId, userId, authOwner: false });
const content = prompts.map((item) => ({ const content = prompts.map((item) => ({
_id: item._id ? new mongoose.Types.ObjectId(item._id) : undefined, _id: item._id,
obj: item.obj, obj: item.obj,
value: item.value, value: item.value,
systemPrompt: item.systemPrompt, systemPrompt: item.systemPrompt || '',
quote: item.quote || [] quote: item.quote || []
})); }));
const [id] = await Promise.all([ if (String(model.userId) === userId) {
...(chatId // update chat Model.findByIdAndUpdate(modelId, {
? [ updateTime: new Date()
Chat.findByIdAndUpdate(chatId, { });
$push: { }
content: {
$each: content const response = await (chatId
} ? Chat.findByIdAndUpdate(chatId, {
}, $push: {
title: content[0].value.slice(0, 20), content: {
latestChat: content[1].value, $each: content
updateTime: new Date() }
}).then(() => '') },
] title: content[0].value.slice(0, 20),
: [ latestChat: content[1].value,
Chat.create({ updateTime: new Date()
_id: newChatId ? new mongoose.Types.ObjectId(newChatId) : undefined, }).then(() => ({
userId, newChatId: ''
modelId, }))
content, : Chat.create({
title: content[0].value.slice(0, 20), _id: newChatId,
latestChat: content[1].value userId,
}).then((res) => res._id) modelId,
]), content,
// update model title: content[0].value.slice(0, 20),
...(String(model.userId) === userId latestChat: content[1].value
? [ }).then((res) => ({
Model.findByIdAndUpdate(modelId, { newChatId: String(res._id)
updateTime: new Date() })));
})
]
: [])
]);
return { return {
id ...response
}; };
} }

View File

@@ -1,149 +0,0 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase } from '@/service/mongo';
import { authShareChat } from '@/service/utils/auth';
import { modelServiceToolMap } from '@/service/utils/chat';
import { ChatItemSimpleType } from '@/types/chat';
import { jsonRes } from '@/service/response';
import { ChatModelMap } from '@/constants/model';
import { pushChatBill, updateShareChatBill } from '@/service/events/pushBill';
import { resStreamResponse } from '@/service/utils/chat';
import { ChatRoleEnum } from '@/constants/chat';
import { BillTypeEnum } from '@/constants/user';
import { sensitiveCheck } from '../../openapi/text/sensitiveCheck';
import { appKbSearch } from '../../openapi/kb/appKbSearch';
/* Send prompt: shared-chat completion request handler */
/**
 * Legacy shared-chat completion endpoint (password-protected share links).
 *
 * Authenticates via shareId/password instead of a user session, optionally
 * runs knowledge-base search / system-prompt injection, streams the model
 * answer to the client, and records billing both for the model owner and
 * against the share link's quota. Unlike the private chat route, nothing is
 * persisted to chat history here.
 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  // Close the response on socket error so the stream below stops writing
  res.on('error', () => {
    console.log('error: ', 'request error');
    res.end();
  });

  try {
    const { shareId, password, historyId, prompts } = req.body as {
      prompts: ChatItemSimpleType[]; // full conversation; last item is the new question
      password: string;
      shareId: string;
      historyId: string;
    };

    if (!historyId || !prompts) {
      throw new Error('分享链接无效');
    }

    await connectToDatabase();
    let startTime = Date.now();

    // Validate the share link + password and resolve the model and API keys
    const { model, userOpenAiKey, systemAuthKey, userId } = await authShareChat({
      shareId,
      password
    });

    const modelConstantsData = ChatModelMap[model.chat.chatModel];

    // The last prompt item is the user's new question
    const prompt = prompts[prompts.length - 1];

    // Build optional context: KB quotes and/or a system prompt.
    const {
      rawSearch = [],
      userSystemPrompt = [],
      quotePrompt = []
    } = await (async () => {
      // Knowledge-base search is configured for this model
      if (model.chat.relatedKbs?.length > 0) {
        const { rawSearch, userSystemPrompt, quotePrompt } = await appKbSearch({
          model,
          userId,
          fixedQuote: [], // share chats carry no pinned quotes
          prompt: prompt,
          similarity: model.chat.searchSimilarity,
          limit: model.chat.searchLimit
        });
        return {
          rawSearch: rawSearch,
          userSystemPrompt: userSystemPrompt ? [userSystemPrompt] : [],
          quotePrompt: [quotePrompt]
        };
      }
      // No KB: fall back to the model's static system prompt, if any
      if (model.chat.systemPrompt) {
        return {
          userSystemPrompt: [
            {
              obj: ChatRoleEnum.System,
              value: model.chat.systemPrompt
            }
          ]
        };
      }
      return {};
    })();

    // search result is empty: reply with the configured fallback text,
    // skipping the model call entirely
    if (model.chat.relatedKbs?.length > 0 && !quotePrompt[0]?.value && model.chat.searchEmptyText) {
      const response = model.chat.searchEmptyText;
      return res.end(response);
    }

    // Assemble the full message list: quotes + history + system prompt + question
    const completePrompts = [...quotePrompt, ...prompts.slice(0, -1), ...userSystemPrompt, prompt];

    // content check
    await sensitiveCheck({
      input: [...quotePrompt, ...userSystemPrompt, prompt].map((item) => item.value).join('')
    });

    // Map the user-configured temperature (1-10 scale) onto the model's max temperature
    const temperature = (modelConstantsData.maxTemperature * (model.chat.temperature / 10)).toFixed(
      2
    );

    // Dispatch the chat request (streaming); prefer the user's own OpenAI key
    const { streamResponse, responseMessages } = await modelServiceToolMap[
      model.chat.chatModel
    ].chatCompletion({
      apiKey: userOpenAiKey || systemAuthKey,
      temperature: +temperature,
      messages: completePrompts,
      stream: true,
      res,
      chatId: historyId
    });

    console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);

    // Client already gone: nothing left to stream
    if (res.closed) return res.end();

    try {
      // Pipe the model stream to the client and collect token usage
      const { totalTokens, finishMessages } = await resStreamResponse({
        model: model.chat.chatModel,
        res,
        chatResponse: streamResponse,
        prompts: responseMessages
      });

      res.end();

      /* bill: charge the owner only when the platform key was used */
      pushChatBill({
        isPay: !userOpenAiKey,
        chatModel: model.chat.chatModel,
        userId,
        textLen: finishMessages.map((item) => item.value).join('').length,
        tokens: totalTokens,
        type: BillTypeEnum.chat
      });
      // Also count the tokens against this share link's quota
      updateShareChatBill({
        shareId,
        tokens: totalTokens
      });
    } catch (error) {
      // Stream failed mid-way: close the response; the error is logged only
      res.end();
      console.log('error结束', error);
    }
  } catch (err: any) {
    res.status(500);
    jsonRes(res, {
      code: 500,
      error: err
    });
  }
}

View File

@@ -2,15 +2,13 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase } from '@/service/mongo'; import { connectToDatabase } from '@/service/mongo';
import { authUser, authModel, getApiKey } from '@/service/utils/auth'; import { authUser, authModel, getApiKey } from '@/service/utils/auth';
import { modelServiceToolMap, resStreamResponse } from '@/service/utils/chat'; import { modelServiceToolMap, resStreamResponse } from '@/service/utils/chat';
import { ChatItemSimpleType } from '@/types/chat'; import { ChatItemType } from '@/types/chat';
import { jsonRes } from '@/service/response'; import { jsonRes } from '@/service/response';
import { ChatModelMap } from '@/constants/model'; import { ChatModelMap } from '@/constants/model';
import { pushChatBill } from '@/service/events/pushBill'; import { pushChatBill } from '@/service/events/pushBill';
import { ChatRoleEnum } from '@/constants/chat'; import { ChatRoleEnum } from '@/constants/chat';
import { withNextCors } from '@/service/utils/tools'; import { withNextCors } from '@/service/utils/tools';
import { BillTypeEnum } from '@/constants/user'; import { BillTypeEnum } from '@/constants/user';
import { NEW_CHATID_HEADER } from '@/constants/chat';
import { Types } from 'mongoose';
import { appKbSearch } from '../kb/appKbSearch'; import { appKbSearch } from '../kb/appKbSearch';
/* 发送提示词 */ /* 发送提示词 */
@@ -31,7 +29,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
isStream = true isStream = true
} = req.body as { } = req.body as {
chatId?: string; chatId?: string;
prompts: ChatItemSimpleType[]; prompts: ChatItemType[];
modelId: string; modelId: string;
isStream: boolean; isStream: boolean;
}; };
@@ -111,10 +109,6 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
2 2
); );
// get conversationId. create a newId if it is null
const conversationId = chatId || String(new Types.ObjectId());
!chatId && res?.setHeader(NEW_CHATID_HEADER, conversationId);
// 发出请求 // 发出请求
const { streamResponse, responseMessages, responseText, totalTokens } = const { streamResponse, responseMessages, responseText, totalTokens } =
await modelServiceToolMap[model.chat.chatModel].chatCompletion({ await modelServiceToolMap[model.chat.chatModel].chatCompletion({
@@ -122,8 +116,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
temperature: +temperature, temperature: +temperature,
messages: completePrompts, messages: completePrompts,
stream: isStream, stream: isStream,
res, res
chatId: conversationId
}); });
console.log('api response time:', `${(Date.now() - startTime) / 1000}s`); console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);

View File

@@ -3,7 +3,7 @@ import { jsonRes } from '@/service/response';
import { authUser } from '@/service/utils/auth'; import { authUser } from '@/service/utils/auth';
import { PgClient } from '@/service/pg'; import { PgClient } from '@/service/pg';
import { withNextCors } from '@/service/utils/tools'; import { withNextCors } from '@/service/utils/tools';
import type { ChatItemSimpleType } from '@/types/chat'; import type { ChatItemType } from '@/types/chat';
import type { ModelSchema } from '@/types/mongoSchema'; import type { ModelSchema } from '@/types/mongoSchema';
import { authModel } from '@/service/utils/auth'; import { authModel } from '@/service/utils/auth';
import { ChatModelMap } from '@/constants/model'; import { ChatModelMap } from '@/constants/model';
@@ -18,7 +18,7 @@ export type QuoteItemType = {
source?: string; source?: string;
}; };
type Props = { type Props = {
prompts: ChatItemSimpleType[]; prompts: ChatItemType[];
similarity: number; similarity: number;
limit: number; limit: number;
appId: string; appId: string;
@@ -79,15 +79,15 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
export async function appKbSearch({ export async function appKbSearch({
model, model,
userId, userId,
fixedQuote, fixedQuote = [],
prompt, prompt,
similarity = 0.8, similarity = 0.8,
limit = 5 limit = 5
}: { }: {
model: ModelSchema; model: ModelSchema;
userId: string; userId: string;
fixedQuote: QuoteItemType[]; fixedQuote?: QuoteItemType[];
prompt: ChatItemSimpleType; prompt: ChatItemType;
similarity: number; similarity: number;
limit: number; limit: number;
}): Promise<Response> { }): Promise<Response> {
@@ -120,7 +120,7 @@ export async function appKbSearch({
...searchRes.slice(0, 3), ...searchRes.slice(0, 3),
...fixedQuote.slice(0, 2), ...fixedQuote.slice(0, 2),
...searchRes.slice(3), ...searchRes.slice(3),
...fixedQuote.slice(2, 4) ...fixedQuote.slice(2, Math.floor(fixedQuote.length * 0.4))
].filter((item) => { ].filter((item) => {
if (idSet.has(item.id)) { if (idSet.has(item.id)) {
return false; return false;

View File

@@ -2,18 +2,18 @@
import type { NextApiRequest, NextApiResponse } from 'next'; import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response'; import { jsonRes } from '@/service/response';
import { authUser } from '@/service/utils/auth'; import { authUser } from '@/service/utils/auth';
import type { ChatItemSimpleType } from '@/types/chat'; import type { ChatItemType } from '@/types/chat';
import { countOpenAIToken } from '@/utils/plugin/openai'; import { countOpenAIToken } from '@/utils/plugin/openai';
import { OpenAiChatEnum } from '@/constants/model'; import { OpenAiChatEnum } from '@/constants/model';
type ModelType = `${OpenAiChatEnum}`; type ModelType = `${OpenAiChatEnum}`;
type Props = { type Props = {
messages: ChatItemSimpleType[]; messages: ChatItemType[];
model: ModelType; model: ModelType;
maxLen: number; maxLen: number;
}; };
type Response = ChatItemSimpleType[]; type Response = ChatItemType[];
export default async function handler(req: NextApiRequest, res: NextApiResponse) { export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try { try {
@@ -45,11 +45,11 @@ export function gpt_chatItemTokenSlice({
model, model,
maxToken maxToken
}: { }: {
messages: ChatItemSimpleType[]; messages: ChatItemType[];
model: ModelType; model: ModelType;
maxToken: number; maxToken: number;
}) { }) {
let result: ChatItemSimpleType[] = []; let result: ChatItemType[] = [];
for (let i = 0; i < messages.length; i++) { for (let i = 0; i < messages.length; i++) {
const msgs = [...result, messages[i]]; const msgs = [...result, messages[i]];

View File

@@ -0,0 +1,311 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase } from '@/service/mongo';
import { authUser, authModel, getApiKey, authShareChat, type AuthType } from '@/service/utils/auth';
import { modelServiceToolMap, V2_StreamResponse } from '@/service/utils/chat';
import { jsonRes } from '@/service/response';
import { ChatModelMap } from '@/constants/model';
import { pushChatBill, updateShareChatBill } from '@/service/events/pushBill';
import { ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat';
import { withNextCors } from '@/service/utils/tools';
import { BillTypeEnum } from '@/constants/user';
import { appKbSearch } from '../../../openapi/kb/appKbSearch';
import type { CreateChatCompletionRequest } from 'openai';
import { gptMessage2ChatType, textAdaptGptResponse } from '@/utils/adapt';
import { getChatHistory } from './getHistory';
import { saveChat } from '@/pages/api/chat/saveChat';
import { sseResponse } from '@/service/utils/tools';
import { getErrText } from '@/utils/tools';
import { type ChatCompletionRequestMessage } from 'openai';
import { Types } from 'mongoose';
export type MessageItemType = ChatCompletionRequestMessage & { _id?: string };
type FastGptWebChatProps = {
chatId?: string; // undefined: nonuse history, '': new chat, 'xxxxx': use history
appId?: string;
};
type FastGptShareChatProps = {
password?: string;
shareId?: string;
};
export type Props = CreateChatCompletionRequest &
FastGptWebChatProps &
FastGptShareChatProps & {
messages: MessageItemType[];
};
export type ChatResponseType = {
newChatId: string;
quoteLen?: number;
};
/* Send prompt: OpenAI-compatible v2 chat completion request handler */
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse) {
res.on('close', () => {
res.end();
});
res.on('error', () => {
console.log('error: ', 'request error');
res.end();
});
let { chatId, appId, shareId, password = '', stream = false, messages = [] } = req.body as Props;
let step = 0;
try {
if (!messages) {
throw new Error('Prams Error');
}
if (!Array.isArray(messages)) {
throw new Error('messages is not array');
}
await connectToDatabase();
let startTime = Date.now();
/* user auth */
const {
userId,
appId: authAppid,
authType
} = await (shareId
? authShareChat({
shareId,
password
})
: authUser({ req }));
appId = appId ? appId : authAppid;
if (!appId) {
throw new Error('appId is empty');
}
// auth app permission
const { model, showModelDetail } = await authModel({
userId,
modelId: appId,
authOwner: false,
reserveDetail: true
});
const showAppDetail = !shareId && showModelDetail;
/* get api key */
const { systemAuthKey: apiKey, userOpenAiKey } = await getApiKey({
model: model.chat.chatModel,
userId,
mustPay: authType !== 'token'
});
// get history
const { history } = await getChatHistory({ chatId, userId });
const prompts = history.concat(gptMessage2ChatType(messages));
// adapt fastgpt web
if (prompts[prompts.length - 1].obj === 'AI') {
prompts.pop();
}
// user question
const prompt = prompts[prompts.length - 1];
const {
rawSearch = [],
userSystemPrompt = [],
quotePrompt = []
} = await (async () => {
// 使用了知识库搜索
if (model.chat.relatedKbs?.length > 0) {
const { rawSearch, userSystemPrompt, quotePrompt } = await appKbSearch({
model,
userId,
fixedQuote: history[history.length - 1]?.quote,
prompt,
similarity: model.chat.searchSimilarity,
limit: model.chat.searchLimit
});
return {
rawSearch,
userSystemPrompt: userSystemPrompt ? [userSystemPrompt] : [],
quotePrompt: [quotePrompt]
};
}
if (model.chat.systemPrompt) {
return {
userSystemPrompt: [
{
obj: ChatRoleEnum.System,
value: model.chat.systemPrompt
}
]
};
}
return {};
})();
// search result is empty
if (model.chat.relatedKbs?.length > 0 && !quotePrompt[0]?.value && model.chat.searchEmptyText) {
const response = model.chat.searchEmptyText;
if (stream) {
sseResponse({
res,
event: sseResponseEventEnum.answer,
data: textAdaptGptResponse({
text: response,
model: model.chat.chatModel,
finish_reason: 'stop'
})
});
return res.end();
} else {
return res.json({
id: chatId || '',
model: model.chat.chatModel,
usage: { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 },
choices: [
{ message: [{ role: 'assistant', content: response }], finish_reason: 'stop', index: 0 }
]
});
}
}
// api messages. [quote,context,systemPrompt,question]
const completePrompts = [...quotePrompt, ...prompts.slice(0, -1), ...userSystemPrompt, prompt];
// chat temperature
const modelConstantsData = ChatModelMap[model.chat.chatModel];
// FastGpt temperature range: 1~10
const temperature = (modelConstantsData.maxTemperature * (model.chat.temperature / 10)).toFixed(
2
);
// start model api. responseText and totalTokens: valid only if stream = false
const { streamResponse, responseMessages, responseText, totalTokens } =
await modelServiceToolMap[model.chat.chatModel].chatCompletion({
apiKey: userOpenAiKey || apiKey,
temperature: +temperature,
messages: completePrompts,
stream,
res
});
console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);
if (res.closed) return res.end();
// create a chatId
const newChatId = chatId === '' ? new Types.ObjectId() : undefined;
// response answer
const {
textLen = 0,
answer = responseText,
tokens = totalTokens
} = await (async () => {
if (stream) {
// 创建响应流
res.setHeader('Content-Type', 'text/event-stream;charset-utf-8');
res.setHeader('Access-Control-Allow-Origin', '*');
res.setHeader('Transfer-Encoding', 'chunked');
res.setHeader('X-Accel-Buffering', 'no');
res.setHeader('Cache-Control', 'no-cache, no-transform');
step = 1;
try {
// response newChatId and quota
sseResponse({
res,
event: sseResponseEventEnum.chatResponse,
data: JSON.stringify({
newChatId,
quoteLen: rawSearch.length
})
});
// response answer
const { finishMessages, totalTokens, responseContent } = await V2_StreamResponse({
model: model.chat.chatModel,
res,
chatResponse: streamResponse,
prompts: responseMessages
});
return {
answer: responseContent,
textLen: finishMessages.map((item) => item.value).join('').length,
tokens: totalTokens
};
} catch (error) {
console.log('stream response error', error);
return {};
}
} else {
return {
textLen: responseMessages.map((item) => item.value).join('').length
};
}
})();
// save chat history
if (typeof chatId === 'string') {
await saveChat({
newChatId,
chatId,
modelId: appId,
prompts: [
prompt,
{
_id: messages[messages.length - 1]._id,
obj: ChatRoleEnum.AI,
value: answer,
...(showAppDetail
? {
quote: rawSearch,
systemPrompt: userSystemPrompt?.[0]?.value
}
: {})
}
],
userId
});
}
// close response
if (stream) {
res.end();
} else {
res.json({
...(showAppDetail
? {
rawSearch
}
: {}),
newChatId,
id: chatId || '',
model: model.chat.chatModel,
usage: { prompt_tokens: 0, completion_tokens: 0, total_tokens: tokens },
choices: [
{ message: [{ role: 'assistant', content: answer }], finish_reason: 'stop', index: 0 }
]
});
}
pushChatBill({
isPay: !userOpenAiKey,
chatModel: model.chat.chatModel,
userId,
textLen,
tokens,
type: authType === 'apikey' ? BillTypeEnum.openapiChat : BillTypeEnum.chat
});
shareId &&
updateShareChatBill({
shareId,
tokens
});
} catch (err: any) {
res.status(500);
if (step === 1) {
res.end(getErrText(err, 'Stream response error'));
} else {
jsonRes(res, {
code: 500,
error: err
});
}
}
});

View File

@@ -0,0 +1,66 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser } from '@/service/utils/auth';
import { connectToDatabase, Chat } from '@/service/mongo';
import { Types } from 'mongoose';
import type { ChatItemType } from '@/types/chat';
export type Props = {
chatId?: string;
limit?: number;
};
export type Response = { history: ChatItemType[] };
/**
 * API route: fetch the recent history of one chat for the authenticated user.
 * Body: { chatId?, limit? }. Responds with { history: ChatItemType[] }.
 * Any failure (auth, db, bad id) is normalized to a 500 json payload.
 */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    await connectToDatabase();

    // authenticate first; authUser rejects when credentials are missing/invalid
    const { userId } = await authUser({ req });
    const { chatId, limit } = req.body as Props;

    const data = await getChatHistory({ chatId, userId, limit });
    jsonRes<Response>(res, { data });
  } catch (err) {
    jsonRes(res, { code: 500, error: err });
  }
}
/**
 * Read the trailing `limit` messages of a chat, flattened to ChatItemType[].
 *
 * Returns { history: [] } when no chatId is given (a brand-new conversation).
 * The aggregation matches the chat by _id AND owner userId (ownership check),
 * slices the last `limit` entries of the embedded `content` array, then
 * unwinds so each message becomes one document carrying only the fields the
 * caller needs (_id / obj / value / quote).
 */
export async function getChatHistory({
  chatId,
  userId,
  limit = 50
}: Props & { userId: string }): Promise<Response> {
  if (!chatId) {
    return { history: [] };
  }
  const history = await Chat.aggregate([
    // chat must exist AND belong to this user
    { $match: { _id: new Types.ObjectId(chatId), userId: new Types.ObjectId(userId) } },
    {
      $project: {
        content: {
          $slice: ['$content', -limit] // keep only the last `limit` elements of content
        }
      }
    },
    // one output document per message
    { $unwind: '$content' },
    {
      $project: {
        _id: '$content._id',
        obj: '$content.obj',
        value: '$content.value',
        quote: '$content.quote'
      }
    }
  ]);
  return { history };
}

View File

@@ -59,6 +59,7 @@ const History = dynamic(() => import('./components/History'), {
}); });
import styles from './index.module.scss'; import styles from './index.module.scss';
import { adaptChatItem_openAI } from '@/utils/plugin/openai';
const textareaMinH = '22px'; const textareaMinH = '22px';
@@ -170,19 +171,15 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
controller.current = abortSignal; controller.current = abortSignal;
isLeavePage.current = false; isLeavePage.current = false;
const prompt: ChatItemType[] = prompts.map((item) => ({ const messages = adaptChatItem_openAI({ messages: prompts, reserveId: true });
_id: item._id,
obj: item.obj,
value: item.value
}));
// 流请求,获取数据 // 流请求,获取数据
const { newChatId, quoteLen, systemPrompt } = await streamFetch({ const { newChatId, quoteLen } = await streamFetch({
url: '/api/chat/chat',
data: { data: {
prompt, messages,
chatId, chatId,
modelId appId: modelId,
model: ''
}, },
onMessage: (text: string) => { onMessage: (text: string) => {
setChatData((state) => ({ setChatData((state) => ({
@@ -222,7 +219,7 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
...item, ...item,
status: 'finish', status: 'finish',
quoteLen, quoteLen,
systemPrompt systemPrompt: chatData.systemPrompt
}; };
}) })
})); }));
@@ -237,6 +234,7 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
[ [
chatId, chatId,
modelId, modelId,
chatData.systemPrompt,
setChatData, setChatData,
loadHistory, loadHistory,
loadMyModels, loadMyModels,
@@ -328,8 +326,8 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
// 删除一句话 // 删除一句话
const delChatRecord = useCallback( const delChatRecord = useCallback(
async (index: number, historyId: string) => { async (index: number, historyId?: string) => {
if (!messageContextMenuData) return; if (!messageContextMenuData || !historyId) return;
setIsLoading(true); setIsLoading(true);
try { try {

View File

@@ -56,6 +56,7 @@ const ShareHistory = dynamic(() => import('./components/ShareHistory'), {
}); });
import styles from './index.module.scss'; import styles from './index.module.scss';
import { adaptChatItem_openAI } from '@/utils/plugin/openai';
const textareaMinH = '22px'; const textareaMinH = '22px';
@@ -170,19 +171,15 @@ const Chat = ({ shareId, historyId }: { shareId: string; historyId: string }) =>
controller.current = abortSignal; controller.current = abortSignal;
isLeavePage.current = false; isLeavePage.current = false;
const formatPrompts = prompts.map((item) => ({ const messages = adaptChatItem_openAI({ messages: prompts, reserveId: true });
obj: item.obj,
value: item.value
}));
// 流请求,获取数据 // 流请求,获取数据
const { responseText } = await streamFetch({ const { responseText } = await streamFetch({
url: '/api/chat/shareChat/chat',
data: { data: {
prompts: formatPrompts.slice(-shareChatData.maxContext - 1, -1), messages: messages.slice(-shareChatData.maxContext - 1, -1),
password, password,
shareId, shareId,
historyId model: ''
}, },
onMessage: (text: string) => { onMessage: (text: string) => {
setShareChatData((state) => ({ setShareChatData((state) => ({
@@ -226,7 +223,7 @@ const Chat = ({ shareId, historyId }: { shareId: string; historyId: string }) =>
setShareChatHistory({ setShareChatHistory({
historyId, historyId,
shareId, shareId,
title: formatPrompts[formatPrompts.length - 2].value, title: prompts[prompts.length - 2].value,
latestChat: responseText, latestChat: responseText,
chats: responseHistory chats: responseHistory
}); });
@@ -235,7 +232,7 @@ const Chat = ({ shareId, historyId }: { shareId: string; historyId: string }) =>
{ {
type: 'shareChatFinish', type: 'shareChatFinish',
data: { data: {
question: formatPrompts[formatPrompts.length - 2].value, question: prompts[prompts.length - 2].value,
answer: responseText answer: responseText
} }
}, },

View File

@@ -52,7 +52,7 @@ const ChatSchema = new Schema({
}, },
value: { value: {
type: String, type: String,
required: true default: ''
}, },
quote: { quote: {
type: [ type: [

View File

@@ -3,15 +3,16 @@ import jwt from 'jsonwebtoken';
import Cookie from 'cookie'; import Cookie from 'cookie';
import { Chat, Model, OpenApi, User, ShareChat, KB } from '../mongo'; import { Chat, Model, OpenApi, User, ShareChat, KB } from '../mongo';
import type { ModelSchema } from '@/types/mongoSchema'; import type { ModelSchema } from '@/types/mongoSchema';
import type { ChatItemSimpleType } from '@/types/chat'; import type { ChatItemType } from '@/types/chat';
import mongoose from 'mongoose'; import mongoose from 'mongoose';
import { ClaudeEnum, defaultModel, embeddingModel, EmbeddingModelType } from '@/constants/model'; import { defaultModel } from '@/constants/model';
import { formatPrice } from '@/utils/user'; import { formatPrice } from '@/utils/user';
import { ERROR_ENUM } from '../errorCode'; import { ERROR_ENUM } from '../errorCode';
import { ChatModelType, OpenAiChatEnum } from '@/constants/model'; import { ChatModelType, OpenAiChatEnum } from '@/constants/model';
import { hashPassword } from '@/service/utils/tools'; import { hashPassword } from '@/service/utils/tools';
export type ApiKeyType = 'training' | 'chat'; export type ApiKeyType = 'training' | 'chat';
export type AuthType = 'token' | 'root' | 'apikey';
export const parseCookie = (cookie?: string): Promise<string> => { export const parseCookie = (cookie?: string): Promise<string> => {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
@@ -39,13 +40,11 @@ export const parseCookie = (cookie?: string): Promise<string> => {
export const authUser = async ({ export const authUser = async ({
req, req,
authToken = false, authToken = false,
authOpenApi = false,
authRoot = false, authRoot = false,
authBalance = false authBalance = false
}: { }: {
req: NextApiRequest; req: NextApiRequest;
authToken?: boolean; authToken?: boolean;
authOpenApi?: boolean;
authRoot?: boolean; authRoot?: boolean;
authBalance?: boolean; authBalance?: boolean;
}) => { }) => {
@@ -71,6 +70,36 @@ export const authUser = async ({
return Promise.reject(error); return Promise.reject(error);
} }
}; };
/**
 * Parse an OpenAPI `Authorization` header.
 * Expected shape: `Bearer <keyPrefix>-<keyBody>-<appId>`
 * (e.g. "Bearer fastgpt-xxxx-appId"): the first two dash-separated parts
 * re-join into the api key, the third segment is the target app id.
 * Rejects with unAuthorization on any missing or malformed piece.
 */
const parseAuthorization = async (authorization?: string) => {
  if (!authorization) {
    return Promise.reject(ERROR_ENUM.unAuthorization);
  }
  // Bearer fastgpt-xxxx-appId
  const auth = authorization.split(' ')[1];
  if (!auth) {
    return Promise.reject(ERROR_ENUM.unAuthorization);
  }
  const { apiKey, appId } = await (async () => {
    const arr = auth.split('-');
    // exactly three segments: two for the key, one for the app id
    if (arr.length !== 3) {
      return Promise.reject(ERROR_ENUM.unAuthorization);
    }
    return {
      apiKey: `${arr[0]}-${arr[1]}`,
      appId: arr[2]
    };
  })();
  // validate the api key and resolve its owner's user id
  const uid = await parseOpenApiKey(apiKey);
  return {
    uid,
    appId
  };
};
const parseRootKey = async (rootKey?: string, userId = '') => { const parseRootKey = async (rootKey?: string, userId = '') => {
if (!rootKey || !process.env.ROOT_KEY || rootKey !== process.env.ROOT_KEY) { if (!rootKey || !process.env.ROOT_KEY || rootKey !== process.env.ROOT_KEY) {
return Promise.reject(ERROR_ENUM.unAuthorization); return Promise.reject(ERROR_ENUM.unAuthorization);
@@ -78,31 +107,43 @@ export const authUser = async ({
return userId; return userId;
}; };
const { cookie, apikey, rootkey, userid } = (req.headers || {}) as { const { cookie, apikey, rootkey, userid, authorization } = (req.headers || {}) as {
cookie?: string; cookie?: string;
apikey?: string; apikey?: string;
rootkey?: string; rootkey?: string;
userid?: string; userid?: string;
authorization?: string;
}; };
let uid = ''; let uid = '';
let appId = '';
let authType: AuthType = 'token';
if (authToken) { if (authToken) {
uid = await parseCookie(cookie); uid = await parseCookie(cookie);
} else if (authOpenApi) { authType = 'token';
uid = await parseOpenApiKey(apikey);
} else if (authRoot) { } else if (authRoot) {
uid = await parseRootKey(rootkey, userid); uid = await parseRootKey(rootkey, userid);
authType = 'root';
} else if (cookie) { } else if (cookie) {
uid = await parseCookie(cookie); uid = await parseCookie(cookie);
authType = 'token';
} else if (apikey) { } else if (apikey) {
uid = await parseOpenApiKey(apikey); uid = await parseOpenApiKey(apikey);
authType = 'apikey';
} else if (authorization) {
const authResponse = await parseAuthorization(authorization);
uid = authResponse.uid;
appId = authResponse.appId;
authType = 'apikey';
} else if (rootkey) { } else if (rootkey) {
uid = await parseRootKey(rootkey, userid); uid = await parseRootKey(rootkey, userid);
authType = 'root';
} else { } else {
return Promise.reject(ERROR_ENUM.unAuthorization); return Promise.reject(ERROR_ENUM.unAuthorization);
} }
// balance check
if (authBalance) { if (authBalance) {
const user = await User.findById(uid); const user = await User.findById(uid);
if (!user) { if (!user) {
@@ -115,7 +156,9 @@ export const authUser = async ({
} }
return { return {
userId: uid userId: uid,
appId,
authType
}; };
}; };
@@ -173,15 +216,15 @@ export const getApiKey = async ({
[OpenAiChatEnum.GPT432k]: { [OpenAiChatEnum.GPT432k]: {
userOpenAiKey: user.openaiKey || '', userOpenAiKey: user.openaiKey || '',
systemAuthKey: getGpt4Key() as string systemAuthKey: getGpt4Key() as string
},
[ClaudeEnum.Claude]: {
userOpenAiKey: '',
systemAuthKey: process.env.CLAUDE_KEY as string
} }
}; };
if (!keyMap[model]) {
return Promise.reject('App model is exists');
}
// 有自己的key // 有自己的key
if (!mustPay && keyMap[model]?.userOpenAiKey) { if (!mustPay && keyMap[model].userOpenAiKey) {
return { return {
user, user,
userOpenAiKey: keyMap[model].userOpenAiKey, userOpenAiKey: keyMap[model].userOpenAiKey,
@@ -240,7 +283,7 @@ export const authModel = async ({
return { return {
model, model,
showModelDetail: model.share.isShareDetail || userId === String(model.userId) showModelDetail: userId === String(model.userId)
}; };
}; };
@@ -277,7 +320,7 @@ export const authChat = async ({
}); });
// 聊天内容 // 聊天内容
let content: ChatItemSimpleType[] = []; let content: ChatItemType[] = [];
if (chatId) { if (chatId) {
// 获取 chat 数据 // 获取 chat 数据
@@ -336,28 +379,9 @@ export const authShareChat = async ({
}); });
} }
const modelId = String(shareChat.modelId);
const userId = String(shareChat.userId);
// 获取 model 数据
const { model, showModelDetail } = await authModel({
modelId,
userId,
authOwner: false,
reserveDetail: true
});
// 获取 user 的 apiKey
const { userOpenAiKey, systemAuthKey } = await getApiKey({
model: model.chat.chatModel,
userId
});
return { return {
userOpenAiKey, userId: String(shareChat.userId),
systemAuthKey, appId: String(shareChat.modelId),
userId, authType: 'token' as AuthType
model,
showModelDetail
}; };
}; };

View File

@@ -1,35 +1,37 @@
import { ChatItemSimpleType } from '@/types/chat'; import { ChatItemType } from '@/types/chat';
import { modelToolMap } from '@/utils/plugin'; import { modelToolMap } from '@/utils/plugin';
import type { ChatModelType } from '@/constants/model'; import type { ChatModelType } from '@/constants/model';
import { ChatRoleEnum } from '@/constants/chat'; import { ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat';
import { OpenAiChatEnum, ClaudeEnum } from '@/constants/model'; import { sseResponse } from '../tools';
import { OpenAiChatEnum } from '@/constants/model';
import { chatResponse, openAiStreamResponse } from './openai'; import { chatResponse, openAiStreamResponse } from './openai';
import { claudChat, claudStreamResponse } from './claude';
import type { NextApiResponse } from 'next'; import type { NextApiResponse } from 'next';
import { createParser, ParsedEvent, ReconnectInterval } from 'eventsource-parser';
import { textAdaptGptResponse } from '@/utils/adapt';
export type ChatCompletionType = { export type ChatCompletionType = {
apiKey: string; apiKey: string;
temperature: number; temperature: number;
messages: ChatItemSimpleType[]; messages: ChatItemType[];
chatId?: string; chatId?: string;
[key: string]: any; [key: string]: any;
}; };
export type ChatCompletionResponseType = { export type ChatCompletionResponseType = {
streamResponse: any; streamResponse: any;
responseMessages: ChatItemSimpleType[]; responseMessages: ChatItemType[];
responseText: string; responseText: string;
totalTokens: number; totalTokens: number;
}; };
export type StreamResponseType = { export type StreamResponseType = {
chatResponse: any; chatResponse: any;
prompts: ChatItemSimpleType[]; prompts: ChatItemType[];
res: NextApiResponse; res: NextApiResponse;
[key: string]: any; [key: string]: any;
}; };
export type StreamResponseReturnType = { export type StreamResponseReturnType = {
responseContent: string; responseContent: string;
totalTokens: number; totalTokens: number;
finishMessages: ChatItemSimpleType[]; finishMessages: ChatItemType[];
}; };
export const modelServiceToolMap: Record< export const modelServiceToolMap: Record<
@@ -74,10 +76,6 @@ export const modelServiceToolMap: Record<
model: OpenAiChatEnum.GPT432k, model: OpenAiChatEnum.GPT432k,
...data ...data
}) })
},
[ClaudeEnum.Claude]: {
chatCompletion: claudChat,
streamResponse: claudStreamResponse
} }
}; };
@@ -95,11 +93,11 @@ export const ChatContextFilter = ({
maxTokens maxTokens
}: { }: {
model: ChatModelType; model: ChatModelType;
prompts: ChatItemSimpleType[]; prompts: ChatItemType[];
maxTokens: number; maxTokens: number;
}) => { }) => {
const systemPrompts: ChatItemSimpleType[] = []; const systemPrompts: ChatItemType[] = [];
const chatPrompts: ChatItemSimpleType[] = []; const chatPrompts: ChatItemType[] = [];
let rawTextLen = 0; let rawTextLen = 0;
prompts.forEach((item) => { prompts.forEach((item) => {
@@ -107,6 +105,7 @@ export const ChatContextFilter = ({
rawTextLen += val.length; rawTextLen += val.length;
const data = { const data = {
_id: item._id,
obj: item.obj, obj: item.obj,
value: val value: val
}; };
@@ -129,7 +128,7 @@ export const ChatContextFilter = ({
}); });
// 根据 tokens 截断内容 // 根据 tokens 截断内容
const chats: ChatItemSimpleType[] = []; const chats: ChatItemType[] = [];
// 从后往前截取对话内容 // 从后往前截取对话内容
for (let i = chatPrompts.length - 1; i >= 0; i--) { for (let i = chatPrompts.length - 1; i >= 0; i--) {
@@ -174,3 +173,89 @@ export const resStreamResponse = async ({
return { responseContent, totalTokens, finishMessages }; return { responseContent, totalTokens, finishMessages };
}; };
/**
 * Stream an upstream OpenAI chat-completion response back to the client using
 * the v2 SSE protocol: every upstream delta chunk is re-emitted as an
 * `answer` event in GPT-chunk JSON shape (via textAdaptGptResponse); the
 * upstream `[DONE]` marker becomes a final stop frame plus a literal
 * `[DONE]` data line.
 *
 * Returns the accumulated answer text, the token count of the finished
 * conversation, and `prompts` with the AI answer appended. Parse/pipe errors
 * are logged and swallowed so the partial answer is still counted/returned.
 */
export const V2_StreamResponse = async ({
  model,
  res,
  chatResponse,
  prompts
}: StreamResponseType & {
  model: ChatModelType;
}) => {
  let responseContent = '';
  try {
    const onParse = async (e: ParsedEvent | ReconnectInterval) => {
      if (e.type !== 'event') return;
      const data = e.data;
      // Pull the text delta out of the chunk; non-JSON frames
      // (e.g. the "[DONE]" marker) simply yield empty content.
      const { content = '' } = (() => {
        try {
          const json = JSON.parse(data);
          const content: string = json?.choices?.[0].delta.content || '';
          responseContent += content;
          return { content };
        } catch (error) {}
        return {};
      })();
      // client already disconnected — stop writing
      if (res.closed) return;
      if (data === '[DONE]') {
        // final frame: empty delta with finish_reason 'stop', then DONE marker
        sseResponse({
          res,
          event: sseResponseEventEnum.answer,
          data: textAdaptGptResponse({
            text: null,
            finish_reason: 'stop'
          })
        });
        sseResponse({
          res,
          event: sseResponseEventEnum.answer,
          data: '[DONE]'
        });
      } else {
        sseResponse({
          res,
          event: sseResponseEventEnum.answer,
          data: textAdaptGptResponse({
            text: content
          })
        });
      }
    };
    try {
      const parser = createParser(onParse);
      const decoder = new TextDecoder();
      // Pipe upstream chunks through the SSE parser; bail out early
      // if the client disconnected mid-stream.
      for await (const chunk of chatResponse.data as any) {
        if (res.closed) {
          break;
        }
        parser.feed(decoder.decode(chunk, { stream: true }));
      }
    } catch (error) {
      console.log('pipe error', error);
    }
  } catch (error) {
    console.log('stream error', error);
  }
  // count tokens of the full conversation including the streamed answer
  const finishMessages = prompts.concat({
    obj: ChatRoleEnum.AI,
    value: responseContent
  });
  const totalTokens = modelToolMap[model].countTokens({
    messages: finishMessages
  });
  return {
    responseContent,
    totalTokens,
    finishMessages
  };
};

View File

@@ -28,13 +28,13 @@ export const chatResponse = async ({
maxTokens: Math.ceil(ChatModelMap[model].contextMaxToken * 0.85) maxTokens: Math.ceil(ChatModelMap[model].contextMaxToken * 0.85)
}); });
const adaptMessages = adaptChatItem_openAI({ messages: filterMessages }); const adaptMessages = adaptChatItem_openAI({ messages: filterMessages, reserveId: false });
const chatAPI = getOpenAIApi(); const chatAPI = getOpenAIApi();
const response = await chatAPI.createChatCompletion( const response = await chatAPI.createChatCompletion(
{ {
model, model,
temperature: Number(temperature) || 0, temperature: Number(temperature || 0),
messages: adaptMessages, messages: adaptMessages,
frequency_penalty: 0.5, // 越大,重复内容越少 frequency_penalty: 0.5, // 越大,重复内容越少
presence_penalty: -0.5, // 越大,越容易出现新内容 presence_penalty: -0.5, // 越大,越容易出现新内容

View File

@@ -4,6 +4,7 @@ import crypto from 'crypto';
import jwt from 'jsonwebtoken'; import jwt from 'jsonwebtoken';
import { generateQA } from '../events/generateQA'; import { generateQA } from '../events/generateQA';
import { generateVector } from '../events/generateVector'; import { generateVector } from '../events/generateVector';
import { sseResponseEventEnum } from '@/constants/chat';
/* 密码加密 */ /* 密码加密 */
export const hashPassword = (psw: string) => { export const hashPassword = (psw: string) => {
@@ -67,3 +68,16 @@ export const startQueue = () => {
generateVector(); generateVector();
} }
}; };
/**
 * Write one Server-Sent-Events frame to the response stream.
 * The optional `event` line names the frame type; `data` is written as-is
 * (callers serialize JSON themselves). Each frame ends with a blank line.
 */
export const sseResponse = ({
  res,
  event,
  data
}: {
  res: NextApiResponse;
  event?: `${sseResponseEventEnum}`;
  data: string;
}) => {
  if (event) {
    res.write(`event: ${event}\n`);
  }
  res.write(`data: ${data}\n\n`);
};

View File

@@ -44,7 +44,7 @@ type State = {
delShareChatHistory: (shareId?: string) => void; delShareChatHistory: (shareId?: string) => void;
}; };
const defaultChatData = { const defaultChatData: ChatType = {
chatId: 'chatId', chatId: 'chatId',
modelId: 'modelId', modelId: 'modelId',
model: { model: {

View File

@@ -4,16 +4,14 @@ import { QuoteItemType } from '@/pages/api/openapi/kb/appKbSearch';
export type ExportChatType = 'md' | 'pdf' | 'html'; export type ExportChatType = 'md' | 'pdf' | 'html';
export type ChatItemSimpleType = { export type ChatItemType = {
_id?: string;
obj: `${ChatRoleEnum}`; obj: `${ChatRoleEnum}`;
value: string; value: string;
quoteLen?: number; quoteLen?: number;
quote?: QuoteItemType[]; quote?: QuoteItemType[];
systemPrompt?: string; systemPrompt?: string;
}; };
export type ChatItemType = {
_id: string;
} & ChatItemSimpleType;
export type ChatSiteItemType = { export type ChatSiteItemType = {
status: 'loading' | 'finish'; status: 'loading' | 'finish';

View File

@@ -2,6 +2,10 @@ import { formatPrice } from './user';
import dayjs from 'dayjs'; import dayjs from 'dayjs';
import type { BillSchema } from '../types/mongoSchema'; import type { BillSchema } from '../types/mongoSchema';
import type { UserBillType } from '@/types/user'; import type { UserBillType } from '@/types/user';
import { ChatItemType } from '@/types/chat';
import { ChatCompletionRequestMessageRoleEnum } from 'openai';
import { ChatRoleEnum } from '@/constants/chat';
import type { MessageItemType } from '@/pages/api/openapi/v1/chat/completions';
export const adaptBill = (bill: BillSchema): UserBillType => { export const adaptBill = (bill: BillSchema): UserBillType => {
return { return {
@@ -14,3 +18,37 @@ export const adaptBill = (bill: BillSchema): UserBillType => {
price: formatPrice(bill.price) price: formatPrice(bill.price)
}; };
}; };
/**
 * Convert OpenAI-formatted messages (role/content) into the internal
 * ChatItemType shape (obj/value), keeping any per-message _id.
 */
export const gptMessage2ChatType = (messages: MessageItemType[]): ChatItemType[] => {
  const role2ChatObj = {
    [ChatCompletionRequestMessageRoleEnum.Assistant]: ChatRoleEnum.AI,
    [ChatCompletionRequestMessageRoleEnum.User]: ChatRoleEnum.Human,
    [ChatCompletionRequestMessageRoleEnum.System]: ChatRoleEnum.System
  };
  return messages.map(({ _id, role, content }) => ({
    _id,
    obj: role2ChatObj[role],
    value: content
  }));
};
/**
 * Wrap a text delta in an OpenAI chat-completion-chunk shaped JSON string.
 * `text === null` produces an empty delta (used for the terminal frame);
 * `finish_reason` is null while streaming and 'stop' on the last chunk.
 * `extraData` fields are spread in first, so the fixed keys win on collision.
 */
export const textAdaptGptResponse = ({
  text,
  model,
  finish_reason = null,
  extraData = {}
}: {
  model?: string;
  text: string | null;
  finish_reason?: null | 'stop';
  extraData?: Object;
}) => {
  const delta = text === null ? {} : { content: text };
  const payload = {
    ...extraData,
    id: '',
    object: '',
    created: 0,
    model,
    choices: [{ delta, index: 0, finish_reason }]
  };
  return JSON.stringify(payload);
};

View File

@@ -1,18 +1,15 @@
import { ClaudeEnum, OpenAiChatEnum } from '@/constants/model'; import { OpenAiChatEnum } from '@/constants/model';
import type { ChatModelType } from '@/constants/model'; import type { ChatModelType } from '@/constants/model';
import type { ChatItemSimpleType } from '@/types/chat'; import type { ChatItemType } from '@/types/chat';
import { countOpenAIToken, openAiSliceTextByToken } from './openai'; import { countOpenAIToken, openAiSliceTextByToken } from './openai';
import { gpt_chatItemTokenSlice } from '@/pages/api/openapi/text/gptMessagesSlice'; import { gpt_chatItemTokenSlice } from '@/pages/api/openapi/text/gptMessagesSlice';
export const modelToolMap: Record< export const modelToolMap: Record<
ChatModelType, ChatModelType,
{ {
countTokens: (data: { messages: ChatItemSimpleType[] }) => number; countTokens: (data: { messages: ChatItemType[] }) => number;
sliceText: (data: { text: string; length: number }) => string; sliceText: (data: { text: string; length: number }) => string;
tokenSlice: (data: { tokenSlice: (data: { messages: ChatItemType[]; maxToken: number }) => ChatItemType[];
messages: ChatItemSimpleType[];
maxToken: number;
}) => ChatItemSimpleType[];
} }
> = { > = {
[OpenAiChatEnum.GPT35]: { [OpenAiChatEnum.GPT35]: {
@@ -34,10 +31,5 @@ export const modelToolMap: Record<
countTokens: ({ messages }) => countOpenAIToken({ model: OpenAiChatEnum.GPT432k, messages }), countTokens: ({ messages }) => countOpenAIToken({ model: OpenAiChatEnum.GPT432k, messages }),
sliceText: (data) => openAiSliceTextByToken({ model: OpenAiChatEnum.GPT432k, ...data }), sliceText: (data) => openAiSliceTextByToken({ model: OpenAiChatEnum.GPT432k, ...data }),
tokenSlice: (data) => gpt_chatItemTokenSlice({ model: OpenAiChatEnum.GPT432k, ...data }) tokenSlice: (data) => gpt_chatItemTokenSlice({ model: OpenAiChatEnum.GPT432k, ...data })
},
[ClaudeEnum.Claude]: {
countTokens: ({ messages }) => countOpenAIToken({ model: OpenAiChatEnum.GPT35, messages }),
sliceText: (data) => openAiSliceTextByToken({ model: OpenAiChatEnum.GPT35, ...data }),
tokenSlice: (data) => gpt_chatItemTokenSlice({ model: OpenAiChatEnum.GPT35, ...data })
} }
}; };

View File

@@ -1,11 +1,12 @@
import { encoding_for_model, type Tiktoken } from '@dqbd/tiktoken'; import { encoding_for_model, type Tiktoken } from '@dqbd/tiktoken';
import type { ChatItemSimpleType } from '@/types/chat'; import type { ChatItemType } from '@/types/chat';
import { ChatRoleEnum } from '@/constants/chat'; import { ChatRoleEnum } from '@/constants/chat';
import { ChatCompletionRequestMessage, ChatCompletionRequestMessageRoleEnum } from 'openai'; import { ChatCompletionRequestMessageRoleEnum } from 'openai';
import { OpenAiChatEnum } from '@/constants/model'; import { OpenAiChatEnum } from '@/constants/model';
import Graphemer from 'graphemer'; import Graphemer from 'graphemer';
import axios from 'axios'; import axios from 'axios';
import dayjs from 'dayjs'; import dayjs from 'dayjs';
import type { MessageItemType } from '@/pages/api/openapi/v1/chat/completions';
const textDecoder = new TextDecoder(); const textDecoder = new TextDecoder();
const graphemer = new Graphemer(); const graphemer = new Graphemer();
@@ -86,16 +87,19 @@ export const getOpenAiEncMap = () => {
}; };
export const adaptChatItem_openAI = ({ export const adaptChatItem_openAI = ({
messages messages,
reserveId
}: { }: {
messages: ChatItemSimpleType[]; messages: ChatItemType[];
}): ChatCompletionRequestMessage[] => { reserveId: boolean;
}): MessageItemType[] => {
const map = { const map = {
[ChatRoleEnum.AI]: ChatCompletionRequestMessageRoleEnum.Assistant, [ChatRoleEnum.AI]: ChatCompletionRequestMessageRoleEnum.Assistant,
[ChatRoleEnum.Human]: ChatCompletionRequestMessageRoleEnum.User, [ChatRoleEnum.Human]: ChatCompletionRequestMessageRoleEnum.User,
[ChatRoleEnum.System]: ChatCompletionRequestMessageRoleEnum.System [ChatRoleEnum.System]: ChatCompletionRequestMessageRoleEnum.System
}; };
return messages.map((item) => ({ return messages.map((item) => ({
...(reserveId && { _id: item._id }),
role: map[item.obj] || ChatCompletionRequestMessageRoleEnum.System, role: map[item.obj] || ChatCompletionRequestMessageRoleEnum.System,
content: item.value || '' content: item.value || ''
})); }));
@@ -105,7 +109,7 @@ export function countOpenAIToken({
messages, messages,
model model
}: { }: {
messages: ChatItemSimpleType[]; messages: ChatItemType[];
model: `${OpenAiChatEnum}`; model: `${OpenAiChatEnum}`;
}) { }) {
function getChatGPTEncodingText( function getChatGPTEncodingText(
@@ -158,7 +162,7 @@ export function countOpenAIToken({
return segments.reduce((memo, i) => memo + i.tokens.length, 0) ?? 0; return segments.reduce((memo, i) => memo + i.tokens.length, 0) ?? 0;
} }
const adaptMessages = adaptChatItem_openAI({ messages }); const adaptMessages = adaptChatItem_openAI({ messages, reserveId: true });
return text2TokensLen(getOpenAiEncMap()[model], getChatGPTEncodingText(adaptMessages, model)); return text2TokensLen(getOpenAiEncMap()[model], getChatGPTEncodingText(adaptMessages, model));
} }