feat: openapi v2 chat

archer
2023-06-18 19:59:37 +08:00
parent 2b93ae2d00
commit ee9afa310a
27 changed files with 790 additions and 585 deletions
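This change replaces the old streaming routes (/api/chat/chat and /api/chat/shareChat/chat) with a single OpenAI-compatible endpoint at /api/openapi/v1/chat/completions. A minimal sketch of how a client might call it, based on the Props type introduced below; the appId value and message content are illustrative placeholders:

// hedged sketch: payload fields come from the Props type of the new route
const res = await fetch('/api/openapi/v1/chat/completions', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    chatId: '',                        // '': create a new chat; undefined: no history
    appId: '6475e5c4b0000000000000aa', // hypothetical app id
    model: '',                         // the server uses the app's configured chatModel
    stream: true,
    messages: [{ role: 'user', content: 'hello' }]
  })
});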

View File

@@ -42,7 +42,7 @@
"nextjs-cors": "^2.1.2",
"nodemailer": "^6.9.1",
"nprogress": "^0.2.0",
"openai": "^3.2.1",
"openai": "^3.3.0",
"papaparse": "^5.4.1",
"pg": "^8.10.0",
"react": "18.2.0",

client/pnpm-lock.yaml (generated)
View File

@@ -105,8 +105,8 @@ dependencies:
specifier: ^0.2.0
version: registry.npmmirror.com/nprogress@0.2.0
openai:
specifier: ^3.2.1
version: registry.npmmirror.com/openai@3.2.1
specifier: ^3.3.0
version: registry.npmmirror.com/openai@3.3.0
papaparse:
specifier: ^5.4.1
version: registry.npmmirror.com/papaparse@5.4.1
@@ -10249,10 +10249,10 @@ packages:
is-wsl: registry.npmmirror.com/is-wsl@2.2.0
dev: true
registry.npmmirror.com/openai@3.2.1:
resolution: {integrity: sha512-762C9BNlJPbjjlWZi4WYK9iM2tAVAv0uUp1UmI34vb0CN5T2mjB/qM6RYBmNKMh/dN9fC+bxqPwWJZUTWW052A==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/openai/-/openai-3.2.1.tgz}
registry.npmmirror.com/openai@3.3.0:
resolution: {integrity: sha512-uqxI/Au+aPRnsaQRe8CojU0eCR7I0mBiKjD3sNMzY6DaC1ZVrc85u98mtJW6voDug8fgGN+DIZmTDxTthxb7dQ==, registry: https://registry.npm.taobao.org/, tarball: https://registry.npmmirror.com/openai/-/openai-3.3.0.tgz}
name: openai
version: 3.2.1
version: 3.3.0
dependencies:
axios: registry.npmmirror.com/axios@0.26.1
form-data: registry.npmmirror.com/form-data@4.0.0

View File

@@ -1,67 +1,104 @@
import { GUIDE_PROMPT_HEADER, NEW_CHATID_HEADER, QUOTE_LEN_HEADER } from '@/constants/chat';
import { Props, ChatResponseType } from '@/pages/api/openapi/v1/chat/completions';
import { sseResponseEventEnum } from '@/constants/chat';
import { getErrText } from '@/utils/tools';
interface StreamFetchProps {
url: string;
data: any;
data: Props;
onMessage: (text: string) => void;
abortSignal: AbortController;
}
export const streamFetch = ({ url, data, onMessage, abortSignal }: StreamFetchProps) =>
new Promise<{
responseText: string;
newChatId: string;
systemPrompt: string;
quoteLen: number;
}>(async (resolve, reject) => {
export const streamFetch = ({ data, onMessage, abortSignal }: StreamFetchProps) =>
new Promise<ChatResponseType & { responseText: string }>(async (resolve, reject) => {
try {
const res = await fetch(url, {
const response = await window.fetch('/api/openapi/v1/chat/completions', {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify(data),
signal: abortSignal.signal
signal: abortSignal.signal,
body: JSON.stringify({
...data,
stream: true
})
});
const reader = res.body?.getReader();
if (!reader) return;
const decoder = new TextDecoder();
if (!response?.body) {
throw new Error('Request Error');
}
const newChatId = decodeURIComponent(res.headers.get(NEW_CHATID_HEADER) || '');
const systemPrompt = decodeURIComponent(res.headers.get(GUIDE_PROMPT_HEADER) || '').trim();
const quoteLen = res.headers.get(QUOTE_LEN_HEADER)
? Number(res.headers.get(QUOTE_LEN_HEADER))
: 0;
const reader = response.body?.getReader();
const decoder = new TextDecoder('utf-8');
// response data
let responseText = '';
let newChatId = '';
let quoteLen = 0;
const read = async () => {
try {
const { done, value } = await reader?.read();
const { done, value } = await reader.read();
if (done) {
if (res.status === 200) {
resolve({ responseText, newChatId, quoteLen, systemPrompt });
if (response.status === 200) {
return resolve({
responseText,
newChatId,
quoteLen
});
} else {
const parseError = JSON.parse(responseText);
reject(parseError?.message || '请求异常');
return reject('响应过程出现异常~');
}
return;
}
const text = decoder.decode(value);
responseText += text;
onMessage(text);
const chunk = decoder.decode(value);
const chunkLines = chunk.split('\n\n').filter((item) => item);
const chunkResponse = chunkLines.map((item) => {
const splitEvent = item.split('\n');
if (splitEvent.length === 2) {
return {
event: splitEvent[0].replace('event: ', ''),
data: splitEvent[1].replace('data: ', '')
};
}
return {
event: '',
data: splitEvent[0].replace('data: ', '')
};
});
chunkResponse.forEach((item) => {
// parse json data
const data = (() => {
try {
return JSON.parse(item.data);
} catch (error) {
return item.data;
}
})();
if (item.event === sseResponseEventEnum.answer && data !== '[DONE]') {
const answer: string = data?.choices[0].delta.content || '';
onMessage(answer);
responseText += answer;
} else if (item.event === sseResponseEventEnum.chatResponse) {
const chatResponse = data as ChatResponseType;
newChatId = chatResponse.newChatId;
quoteLen = chatResponse.quoteLen || 0;
}
});
read();
} catch (err: any) {
if (err?.message === 'The user aborted a request.') {
return resolve({ responseText, newChatId, quoteLen, systemPrompt });
return resolve({
responseText,
newChatId,
quoteLen
});
}
reject(typeof err === 'string' ? err : err?.message || '请求异常');
reject(getErrText(err, '请求异常'));
}
};
read();
} catch (err: any) {
console.log(err, '====');
reject(typeof err === 'string' ? err : err?.message || '请求异常');
reject(getErrText(err, '请求异常'));
}
});
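The rewritten streamFetch no longer appends raw text; it decodes SSE frames of the form "event: <name>\ndata: <json>", separated by blank lines. A sketch of how one frame is decomposed by the split logic above (the payload values are illustrative):

const chunk =
  'event: answer\n' +
  'data: {"choices":[{"delta":{"content":"Hi"},"index":0,"finish_reason":null}]}\n\n';
const [frame] = chunk.split('\n\n').filter(Boolean);        // one complete frame
const [eventLine, dataLine] = frame.split('\n');
const event = eventLine.replace('event: ', '');             // 'answer'
const payload = JSON.parse(dataLine.replace('data: ', '')); // parsed chunk JSON
console.log(event, payload.choices[0].delta.content);       // answer Hi

Note that this parsing assumes each network chunk contains whole frames; a frame that straddles two chunks would fail to JSON.parse and fall back to the raw string.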

View File

@@ -4,6 +4,7 @@ import type { ChatItemType } from '@/types/chat';
export interface InitChatResponse {
chatId: string;
modelId: string;
systemPrompt?: string;
model: {
name: string;
avatar: string;

View File

@@ -1,6 +1,7 @@
export const NEW_CHATID_HEADER = 'response-new-chat-id';
export const QUOTE_LEN_HEADER = 'response-quote-len';
export const GUIDE_PROMPT_HEADER = 'response-guide-prompt';
export enum sseResponseEventEnum {
answer = 'answer',
chatResponse = 'chatResponse'
}
export enum ChatRoleEnum {
System = 'System',

View File

@@ -12,11 +12,8 @@ export enum OpenAiChatEnum {
'GPT4' = 'gpt-4',
'GPT432k' = 'gpt-4-32k'
}
export enum ClaudeEnum {
'Claude' = 'Claude'
}
export type ChatModelType = `${OpenAiChatEnum}` | `${ClaudeEnum}`;
export type ChatModelType = `${OpenAiChatEnum}`;
export type ChatModelItemType = {
chatModel: ChatModelType;
@@ -59,14 +56,6 @@ export const ChatModelMap = {
systemMaxToken: 8000,
maxTemperature: 1.2,
price: 90
},
[ClaudeEnum.Claude]: {
chatModel: ClaudeEnum.Claude,
name: 'Claude(免费体验)',
contextMaxToken: 9000,
systemMaxToken: 2700,
maxTemperature: 1,
price: 0
}
};

View File

@@ -1,192 +0,0 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase } from '@/service/mongo';
import { authChat } from '@/service/utils/auth';
import { modelServiceToolMap } from '@/service/utils/chat';
import { ChatItemType } from '@/types/chat';
import { jsonRes } from '@/service/response';
import { ChatModelMap } from '@/constants/model';
import { pushChatBill } from '@/service/events/pushBill';
import { resStreamResponse } from '@/service/utils/chat';
import { appKbSearch } from '../openapi/kb/appKbSearch';
import { ChatRoleEnum, QUOTE_LEN_HEADER, GUIDE_PROMPT_HEADER } from '@/constants/chat';
import { BillTypeEnum } from '@/constants/user';
import { sensitiveCheck } from '../openapi/text/sensitiveCheck';
import { NEW_CHATID_HEADER } from '@/constants/chat';
import { saveChat } from './saveChat';
import { Types } from 'mongoose';
/* send the prompt */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
res.on('close', () => {
res.end();
});
res.on('error', () => {
console.log('error: ', 'request error');
res.end();
});
try {
const { chatId, prompt, modelId } = req.body as {
prompt: [ChatItemType, ChatItemType];
modelId: string;
chatId?: string;
};
if (!modelId || !prompt || prompt.length !== 2) {
throw new Error('Chat 缺少参数');
}
await connectToDatabase();
let startTime = Date.now();
const { model, showModelDetail, content, userOpenAiKey, systemAuthKey, userId } =
await authChat({
modelId,
chatId,
req
});
const modelConstantsData = ChatModelMap[model.chat.chatModel];
const {
rawSearch = [],
userSystemPrompt = [],
quotePrompt = []
} = await (async () => {
// knowledge base search is enabled
if (model.chat.relatedKbs?.length > 0) {
const { rawSearch, userSystemPrompt, quotePrompt } = await appKbSearch({
model,
userId,
fixedQuote: content[content.length - 1]?.quote || [],
prompt: prompt[0],
similarity: model.chat.searchSimilarity,
limit: model.chat.searchLimit
});
return {
rawSearch: rawSearch,
userSystemPrompt: userSystemPrompt ? [userSystemPrompt] : [],
quotePrompt: [quotePrompt]
};
}
if (model.chat.systemPrompt) {
return {
userSystemPrompt: [
{
obj: ChatRoleEnum.System,
value: model.chat.systemPrompt
}
]
};
}
return {};
})();
// get conversationId. create a newId if it is null
const conversationId = chatId || String(new Types.ObjectId());
!chatId && res.setHeader(NEW_CHATID_HEADER, conversationId);
if (showModelDetail) {
userSystemPrompt[0] &&
res.setHeader(GUIDE_PROMPT_HEADER, encodeURIComponent(userSystemPrompt[0].value));
res.setHeader(QUOTE_LEN_HEADER, rawSearch.length);
}
// search result is empty
if (model.chat.relatedKbs?.length > 0 && !quotePrompt[0]?.value && model.chat.searchEmptyText) {
const response = model.chat.searchEmptyText;
await saveChat({
chatId,
newChatId: conversationId,
modelId,
prompts: [
prompt[0],
{
...prompt[1],
quote: [],
value: response
}
],
userId
});
return res.end(response);
}
// read conversation content
const prompts = [...quotePrompt, ...content, ...userSystemPrompt, prompt[0]];
// content check
await sensitiveCheck({
input: [...quotePrompt, ...userSystemPrompt, prompt[0]].map((item) => item.value).join('')
});
// calculate temperature
const temperature = (modelConstantsData.maxTemperature * (model.chat.temperature / 10)).toFixed(
2
);
// send the chat request
const { streamResponse, responseMessages } = await modelServiceToolMap[
model.chat.chatModel
].chatCompletion({
apiKey: userOpenAiKey || systemAuthKey,
temperature: +temperature,
messages: prompts,
stream: true,
res,
chatId: conversationId
});
console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);
if (res.closed) return res.end();
try {
const { totalTokens, finishMessages, responseContent } = await resStreamResponse({
model: model.chat.chatModel,
res,
chatResponse: streamResponse,
prompts: responseMessages
});
// save chat
await saveChat({
chatId,
newChatId: conversationId,
modelId,
prompts: [
prompt[0],
{
...prompt[1],
value: responseContent,
quote: showModelDetail ? rawSearch : [],
systemPrompt: showModelDetail ? userSystemPrompt[0]?.value : ''
}
],
userId
});
res.end();
// only bill when the platform's key is used
pushChatBill({
isPay: !userOpenAiKey,
chatModel: model.chat.chatModel,
userId,
chatId: conversationId,
textLen: finishMessages.map((item) => item.value).join('').length,
tokens: totalTokens,
type: BillTypeEnum.chat
});
} catch (error) {
res.end();
console.log('error, ending response', error);
}
} catch (err: any) {
res.status(500);
jsonRes(res, {
code: 500,
error: err
});
}
}

View File

@@ -20,31 +20,32 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
await connectToDatabase();
let model: ModelSchema;
// without a modelId, fall back to the user's first model
if (!modelId) {
const myModel = await Model.findOne({ userId });
if (!myModel) {
const { _id } = await Model.create({
name: '应用1',
userId
});
model = (await Model.findById(_id)) as ModelSchema;
const model = await (async () => {
if (!modelId) {
const myModel = await Model.findOne({ userId });
if (!myModel) {
const { _id } = await Model.create({
name: '应用1',
userId
});
return (await Model.findById(_id)) as ModelSchema;
} else {
return myModel;
}
} else {
model = myModel;
// check usage permission
const authRes = await authModel({
modelId,
userId,
authUser: false,
authOwner: false
});
return authRes.model;
}
modelId = model._id;
} else {
// check usage permission
const authRes = await authModel({
modelId,
userId,
authUser: false,
authOwner: false
});
model = authRes.model;
}
})();
modelId = modelId || model._id;
// chat history
let history: ChatItemType[] = [];
@@ -86,6 +87,8 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
]);
}
const isOwner = String(model.userId) === userId;
jsonRes<InitChatResponse>(res, {
data: {
chatId: chatId || '',
@@ -94,9 +97,10 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
name: model.name,
avatar: model.avatar,
intro: model.intro,
canUse: model.share.isShare || String(model.userId) === userId
canUse: model.share.isShare || isOwner
},
chatModel: model.chat.chatModel,
systemPrompt: isOwner ? model.chat.systemPrompt : '',
history
}
});

View File

@@ -4,10 +4,9 @@ import { ChatItemType } from '@/types/chat';
import { connectToDatabase, Chat, Model } from '@/service/mongo';
import { authModel } from '@/service/utils/auth';
import { authUser } from '@/service/utils/auth';
import mongoose from 'mongoose';
import { Types } from 'mongoose';
type Props = {
newChatId?: string;
chatId?: string;
modelId: string;
prompts: [ChatItemType, ChatItemType];
@@ -16,7 +15,7 @@ type Props = {
/* store chat content */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
const { chatId, modelId, prompts, newChatId } = req.body as Props;
const { chatId, modelId, prompts } = req.body as Props;
if (!prompts) {
throw new Error('缺少参数');
@@ -24,16 +23,15 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const { userId } = await authUser({ req, authToken: true });
const nId = await saveChat({
const response = await saveChat({
chatId,
modelId,
prompts,
newChatId,
userId
});
jsonRes(res, {
data: nId
data: response
});
} catch (err) {
jsonRes(res, {
@@ -44,58 +42,54 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
}
export async function saveChat({
chatId,
newChatId,
chatId,
modelId,
prompts,
userId
}: Props & { userId: string }) {
}: Props & { newChatId?: Types.ObjectId; userId: string }) {
await connectToDatabase();
const { model } = await authModel({ modelId, userId, authOwner: false });
const content = prompts.map((item) => ({
_id: item._id ? new mongoose.Types.ObjectId(item._id) : undefined,
_id: item._id,
obj: item.obj,
value: item.value,
systemPrompt: item.systemPrompt,
systemPrompt: item.systemPrompt || '',
quote: item.quote || []
}));
const [id] = await Promise.all([
...(chatId // update chat
? [
Chat.findByIdAndUpdate(chatId, {
$push: {
content: {
$each: content
}
},
title: content[0].value.slice(0, 20),
latestChat: content[1].value,
updateTime: new Date()
}).then(() => '')
]
: [
Chat.create({
_id: newChatId ? new mongoose.Types.ObjectId(newChatId) : undefined,
userId,
modelId,
content,
title: content[0].value.slice(0, 20),
latestChat: content[1].value
}).then((res) => res._id)
]),
// update model
...(String(model.userId) === userId
? [
Model.findByIdAndUpdate(modelId, {
updateTime: new Date()
})
]
: [])
]);
if (String(model.userId) === userId) {
Model.findByIdAndUpdate(modelId, {
updateTime: new Date()
});
}
const response = await (chatId
? Chat.findByIdAndUpdate(chatId, {
$push: {
content: {
$each: content
}
},
title: content[0].value.slice(0, 20),
latestChat: content[1].value,
updateTime: new Date()
}).then(() => ({
newChatId: ''
}))
: Chat.create({
_id: newChatId,
userId,
modelId,
content,
title: content[0].value.slice(0, 20),
latestChat: content[1].value
}).then((res) => ({
newChatId: String(res._id)
})));
return {
id
...response
};
}
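saveChat now resolves an object instead of a bare id: updating an existing chat yields { newChatId: '' }, while creating one yields the stringified Mongo _id. A sketch of the call-site contract, with hypothetical ids and with questionItem/answerItem standing in for the two ChatItemType entries:

const { newChatId } = await saveChat({
  chatId: '',                          // falsy: create a new Chat document
  newChatId: new Types.ObjectId(),     // optional pre-generated id
  modelId: '6475e5c4b0000000000000aa', // hypothetical
  prompts: [questionItem, answerItem],
  userId: '6475e5c4a0000000000000bb'   // hypothetical
});
// create path: newChatId === String(created _id); update path: newChatId === ''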

View File

@@ -1,149 +0,0 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase } from '@/service/mongo';
import { authShareChat } from '@/service/utils/auth';
import { modelServiceToolMap } from '@/service/utils/chat';
import { ChatItemSimpleType } from '@/types/chat';
import { jsonRes } from '@/service/response';
import { ChatModelMap } from '@/constants/model';
import { pushChatBill, updateShareChatBill } from '@/service/events/pushBill';
import { resStreamResponse } from '@/service/utils/chat';
import { ChatRoleEnum } from '@/constants/chat';
import { BillTypeEnum } from '@/constants/user';
import { sensitiveCheck } from '../../openapi/text/sensitiveCheck';
import { appKbSearch } from '../../openapi/kb/appKbSearch';
/* send the prompt */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
res.on('error', () => {
console.log('error: ', 'request error');
res.end();
});
try {
const { shareId, password, historyId, prompts } = req.body as {
prompts: ChatItemSimpleType[];
password: string;
shareId: string;
historyId: string;
};
if (!historyId || !prompts) {
throw new Error('分享链接无效');
}
await connectToDatabase();
let startTime = Date.now();
const { model, userOpenAiKey, systemAuthKey, userId } = await authShareChat({
shareId,
password
});
const modelConstantsData = ChatModelMap[model.chat.chatModel];
const prompt = prompts[prompts.length - 1];
const {
rawSearch = [],
userSystemPrompt = [],
quotePrompt = []
} = await (async () => {
// knowledge base search is enabled
if (model.chat.relatedKbs?.length > 0) {
const { rawSearch, userSystemPrompt, quotePrompt } = await appKbSearch({
model,
userId,
fixedQuote: [],
prompt: prompt,
similarity: model.chat.searchSimilarity,
limit: model.chat.searchLimit
});
return {
rawSearch: rawSearch,
userSystemPrompt: userSystemPrompt ? [userSystemPrompt] : [],
quotePrompt: [quotePrompt]
};
}
if (model.chat.systemPrompt) {
return {
userSystemPrompt: [
{
obj: ChatRoleEnum.System,
value: model.chat.systemPrompt
}
]
};
}
return {};
})();
// search result is empty
if (model.chat.relatedKbs?.length > 0 && !quotePrompt[0]?.value && model.chat.searchEmptyText) {
const response = model.chat.searchEmptyText;
return res.end(response);
}
// read conversation content
const completePrompts = [...quotePrompt, ...prompts.slice(0, -1), ...userSystemPrompt, prompt];
// content check
await sensitiveCheck({
input: [...quotePrompt, ...userSystemPrompt, prompt].map((item) => item.value).join('')
});
// calculate temperature
const temperature = (modelConstantsData.maxTemperature * (model.chat.temperature / 10)).toFixed(
2
);
// send the request
const { streamResponse, responseMessages } = await modelServiceToolMap[
model.chat.chatModel
].chatCompletion({
apiKey: userOpenAiKey || systemAuthKey,
temperature: +temperature,
messages: completePrompts,
stream: true,
res,
chatId: historyId
});
console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);
if (res.closed) return res.end();
try {
const { totalTokens, finishMessages } = await resStreamResponse({
model: model.chat.chatModel,
res,
chatResponse: streamResponse,
prompts: responseMessages
});
res.end();
/* bill */
pushChatBill({
isPay: !userOpenAiKey,
chatModel: model.chat.chatModel,
userId,
textLen: finishMessages.map((item) => item.value).join('').length,
tokens: totalTokens,
type: BillTypeEnum.chat
});
updateShareChatBill({
shareId,
tokens: totalTokens
});
} catch (error) {
res.end();
console.log('error, ending response', error);
}
} catch (err: any) {
res.status(500);
jsonRes(res, {
code: 500,
error: err
});
}
}

View File

@@ -2,15 +2,13 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase } from '@/service/mongo';
import { authUser, authModel, getApiKey } from '@/service/utils/auth';
import { modelServiceToolMap, resStreamResponse } from '@/service/utils/chat';
import { ChatItemSimpleType } from '@/types/chat';
import { ChatItemType } from '@/types/chat';
import { jsonRes } from '@/service/response';
import { ChatModelMap } from '@/constants/model';
import { pushChatBill } from '@/service/events/pushBill';
import { ChatRoleEnum } from '@/constants/chat';
import { withNextCors } from '@/service/utils/tools';
import { BillTypeEnum } from '@/constants/user';
import { NEW_CHATID_HEADER } from '@/constants/chat';
import { Types } from 'mongoose';
import { appKbSearch } from '../kb/appKbSearch';
/* send the prompt */
@@ -31,7 +29,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
isStream = true
} = req.body as {
chatId?: string;
prompts: ChatItemSimpleType[];
prompts: ChatItemType[];
modelId: string;
isStream: boolean;
};
@@ -111,10 +109,6 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
2
);
// get conversationId. create a newId if it is null
const conversationId = chatId || String(new Types.ObjectId());
!chatId && res?.setHeader(NEW_CHATID_HEADER, conversationId);
// send the request
const { streamResponse, responseMessages, responseText, totalTokens } =
await modelServiceToolMap[model.chat.chatModel].chatCompletion({
@@ -122,8 +116,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
temperature: +temperature,
messages: completePrompts,
stream: isStream,
res,
chatId: conversationId
res
});
console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);

View File

@@ -3,7 +3,7 @@ import { jsonRes } from '@/service/response';
import { authUser } from '@/service/utils/auth';
import { PgClient } from '@/service/pg';
import { withNextCors } from '@/service/utils/tools';
import type { ChatItemSimpleType } from '@/types/chat';
import type { ChatItemType } from '@/types/chat';
import type { ModelSchema } from '@/types/mongoSchema';
import { authModel } from '@/service/utils/auth';
import { ChatModelMap } from '@/constants/model';
@@ -18,7 +18,7 @@ export type QuoteItemType = {
source?: string;
};
type Props = {
prompts: ChatItemSimpleType[];
prompts: ChatItemType[];
similarity: number;
limit: number;
appId: string;
@@ -79,15 +79,15 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
export async function appKbSearch({
model,
userId,
fixedQuote,
fixedQuote = [],
prompt,
similarity = 0.8,
limit = 5
}: {
model: ModelSchema;
userId: string;
fixedQuote: QuoteItemType[];
prompt: ChatItemSimpleType;
fixedQuote?: QuoteItemType[];
prompt: ChatItemType;
similarity: number;
limit: number;
}): Promise<Response> {
@@ -120,7 +120,7 @@ export async function appKbSearch({
...searchRes.slice(0, 3),
...fixedQuote.slice(0, 2),
...searchRes.slice(3),
...fixedQuote.slice(2, 4)
...fixedQuote.slice(2, Math.floor(fixedQuote.length * 0.4))
].filter((item) => {
if (idSet.has(item.id)) {
return false;

View File

@@ -2,18 +2,18 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser } from '@/service/utils/auth';
import type { ChatItemSimpleType } from '@/types/chat';
import type { ChatItemType } from '@/types/chat';
import { countOpenAIToken } from '@/utils/plugin/openai';
import { OpenAiChatEnum } from '@/constants/model';
type ModelType = `${OpenAiChatEnum}`;
type Props = {
messages: ChatItemSimpleType[];
messages: ChatItemType[];
model: ModelType;
maxLen: number;
};
type Response = ChatItemSimpleType[];
type Response = ChatItemType[];
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
@@ -45,11 +45,11 @@ export function gpt_chatItemTokenSlice({
model,
maxToken
}: {
messages: ChatItemSimpleType[];
messages: ChatItemType[];
model: ModelType;
maxToken: number;
}) {
let result: ChatItemSimpleType[] = [];
let result: ChatItemType[] = [];
for (let i = 0; i < messages.length; i++) {
const msgs = [...result, messages[i]];

View File

@@ -0,0 +1,311 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase } from '@/service/mongo';
import { authUser, authModel, getApiKey, authShareChat, type AuthType } from '@/service/utils/auth';
import { modelServiceToolMap, V2_StreamResponse } from '@/service/utils/chat';
import { jsonRes } from '@/service/response';
import { ChatModelMap } from '@/constants/model';
import { pushChatBill, updateShareChatBill } from '@/service/events/pushBill';
import { ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat';
import { withNextCors } from '@/service/utils/tools';
import { BillTypeEnum } from '@/constants/user';
import { appKbSearch } from '../../../openapi/kb/appKbSearch';
import type { CreateChatCompletionRequest } from 'openai';
import { gptMessage2ChatType, textAdaptGptResponse } from '@/utils/adapt';
import { getChatHistory } from './getHistory';
import { saveChat } from '@/pages/api/chat/saveChat';
import { sseResponse } from '@/service/utils/tools';
import { getErrText } from '@/utils/tools';
import { type ChatCompletionRequestMessage } from 'openai';
import { Types } from 'mongoose';
export type MessageItemType = ChatCompletionRequestMessage & { _id?: string };
type FastGptWebChatProps = {
chatId?: string; // undefined: don't use history; '': create a new chat; 'xxxxx': use this chat's history
appId?: string;
};
type FastGptShareChatProps = {
password?: string;
shareId?: string;
};
export type Props = CreateChatCompletionRequest &
FastGptWebChatProps &
FastGptShareChatProps & {
messages: MessageItemType[];
};
export type ChatResponseType = {
newChatId: string;
quoteLen?: number;
};
/* send the prompt */
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse) {
res.on('close', () => {
res.end();
});
res.on('error', () => {
console.log('error: ', 'request error');
res.end();
});
let { chatId, appId, shareId, password = '', stream = false, messages = [] } = req.body as Props;
let step = 0;
try {
if (!messages) {
throw new Error('Params Error');
}
if (!Array.isArray(messages)) {
throw new Error('messages is not an array');
}
await connectToDatabase();
let startTime = Date.now();
/* user auth */
const {
userId,
appId: authAppid,
authType
} = await (shareId
? authShareChat({
shareId,
password
})
: authUser({ req }));
appId = appId ? appId : authAppid;
if (!appId) {
throw new Error('appId is empty');
}
// auth app permission
const { model, showModelDetail } = await authModel({
userId,
modelId: appId,
authOwner: false,
reserveDetail: true
});
const showAppDetail = !shareId && showModelDetail;
/* get api key */
const { systemAuthKey: apiKey, userOpenAiKey } = await getApiKey({
model: model.chat.chatModel,
userId,
mustPay: authType !== 'token'
});
// get history
const { history } = await getChatHistory({ chatId, userId });
const prompts = history.concat(gptMessage2ChatType(messages));
// adapt fastgpt web
if (prompts[prompts.length - 1].obj === 'AI') {
prompts.pop();
}
// user question
const prompt = prompts[prompts.length - 1];
const {
rawSearch = [],
userSystemPrompt = [],
quotePrompt = []
} = await (async () => {
// knowledge base search is enabled
if (model.chat.relatedKbs?.length > 0) {
const { rawSearch, userSystemPrompt, quotePrompt } = await appKbSearch({
model,
userId,
fixedQuote: history[history.length - 1]?.quote,
prompt,
similarity: model.chat.searchSimilarity,
limit: model.chat.searchLimit
});
return {
rawSearch,
userSystemPrompt: userSystemPrompt ? [userSystemPrompt] : [],
quotePrompt: [quotePrompt]
};
}
if (model.chat.systemPrompt) {
return {
userSystemPrompt: [
{
obj: ChatRoleEnum.System,
value: model.chat.systemPrompt
}
]
};
}
return {};
})();
// search result is empty
if (model.chat.relatedKbs?.length > 0 && !quotePrompt[0]?.value && model.chat.searchEmptyText) {
const response = model.chat.searchEmptyText;
if (stream) {
sseResponse({
res,
event: sseResponseEventEnum.answer,
data: textAdaptGptResponse({
text: response,
model: model.chat.chatModel,
finish_reason: 'stop'
})
});
return res.end();
} else {
return res.json({
id: chatId || '',
model: model.chat.chatModel,
usage: { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 },
choices: [
{ message: [{ role: 'assistant', content: response }], finish_reason: 'stop', index: 0 }
]
});
}
}
// api messages. [quote,context,systemPrompt,question]
const completePrompts = [...quotePrompt, ...prompts.slice(0, -1), ...userSystemPrompt, prompt];
// chat temperature
const modelConstantsData = ChatModelMap[model.chat.chatModel];
// FastGpt temperature range: 1~10
const temperature = (modelConstantsData.maxTemperature * (model.chat.temperature / 10)).toFixed(
2
);
// start model api. responseText and totalTokens: valid only if stream = false
const { streamResponse, responseMessages, responseText, totalTokens } =
await modelServiceToolMap[model.chat.chatModel].chatCompletion({
apiKey: userOpenAiKey || apiKey,
temperature: +temperature,
messages: completePrompts,
stream,
res
});
console.log('api response time:', `${(Date.now() - startTime) / 1000}s`);
if (res.closed) return res.end();
// create a chatId
const newChatId = chatId === '' ? new Types.ObjectId() : undefined;
// response answer
const {
textLen = 0,
answer = responseText,
tokens = totalTokens
} = await (async () => {
if (stream) {
// create the response stream
res.setHeader('Content-Type', 'text/event-stream; charset=utf-8');
res.setHeader('Access-Control-Allow-Origin', '*');
res.setHeader('Transfer-Encoding', 'chunked');
res.setHeader('X-Accel-Buffering', 'no');
res.setHeader('Cache-Control', 'no-cache, no-transform');
step = 1;
try {
// respond with newChatId and quote length
sseResponse({
res,
event: sseResponseEventEnum.chatResponse,
data: JSON.stringify({
newChatId,
quoteLen: rawSearch.length
})
});
// response answer
const { finishMessages, totalTokens, responseContent } = await V2_StreamResponse({
model: model.chat.chatModel,
res,
chatResponse: streamResponse,
prompts: responseMessages
});
return {
answer: responseContent,
textLen: finishMessages.map((item) => item.value).join('').length,
tokens: totalTokens
};
} catch (error) {
console.log('stream response error', error);
return {};
}
} else {
return {
textLen: responseMessages.map((item) => item.value).join('').length
};
}
})();
// save chat history
if (typeof chatId === 'string') {
await saveChat({
newChatId,
chatId,
modelId: appId,
prompts: [
prompt,
{
_id: messages[messages.length - 1]._id,
obj: ChatRoleEnum.AI,
value: answer,
...(showAppDetail
? {
quote: rawSearch,
systemPrompt: userSystemPrompt?.[0]?.value
}
: {})
}
],
userId
});
}
// close response
if (stream) {
res.end();
} else {
res.json({
...(showAppDetail
? {
rawSearch
}
: {}),
newChatId,
id: chatId || '',
model: model.chat.chatModel,
usage: { prompt_tokens: 0, completion_tokens: 0, total_tokens: tokens },
choices: [
{ message: [{ role: 'assistant', content: answer }], finish_reason: 'stop', index: 0 }
]
});
}
pushChatBill({
isPay: !userOpenAiKey,
chatModel: model.chat.chatModel,
userId,
textLen,
tokens,
type: authType === 'apikey' ? BillTypeEnum.openapiChat : BillTypeEnum.chat
});
shareId &&
updateShareChatBill({
shareId,
tokens
});
} catch (err: any) {
res.status(500);
if (step === 1) {
res.end(getErrText(err, 'Stream response error'));
} else {
jsonRes(res, {
code: 500,
error: err
});
}
}
});
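When stream is true, the handler first emits a chatResponse frame carrying newChatId and quoteLen, then answer frames shaped by textAdaptGptResponse, and finally a stop frame plus [DONE] (see V2_StreamResponse below). An illustrative wire trace, with made-up values:

event: chatResponse
data: {"newChatId":"6475e5c4b0000000000000aa","quoteLen":2}

event: answer
data: {"id":"","object":"","created":0,"model":"gpt-3.5-turbo","choices":[{"delta":{"content":"Hi"},"index":0,"finish_reason":null}]}

event: answer
data: {"id":"","object":"","created":0,"choices":[{"delta":{},"index":0,"finish_reason":"stop"}]}

event: answer
data: [DONE]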

View File

@@ -0,0 +1,66 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@/service/response';
import { authUser } from '@/service/utils/auth';
import { connectToDatabase, Chat } from '@/service/mongo';
import { Types } from 'mongoose';
import type { ChatItemType } from '@/types/chat';
export type Props = {
chatId?: string;
limit?: number;
};
export type Response = { history: ChatItemType[] };
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
const { userId } = await authUser({ req });
const { chatId, limit } = req.body as Props;
jsonRes<Response>(res, {
data: await getChatHistory({
chatId,
userId,
limit
})
});
} catch (err) {
jsonRes(res, {
code: 500,
error: err
});
}
}
export async function getChatHistory({
chatId,
userId,
limit = 50
}: Props & { userId: string }): Promise<Response> {
if (!chatId) {
return { history: [] };
}
const history = await Chat.aggregate([
{ $match: { _id: new Types.ObjectId(chatId), userId: new Types.ObjectId(userId) } },
{
$project: {
content: {
$slice: ['$content', -limit] // return only the last `limit` elements of the content array
}
}
},
{ $unwind: '$content' },
{
$project: {
_id: '$content._id',
obj: '$content.obj',
value: '$content.value',
quote: '$content.quote'
}
}
]);
return { history };
}

View File

@@ -59,6 +59,7 @@ const History = dynamic(() => import('./components/History'), {
});
import styles from './index.module.scss';
import { adaptChatItem_openAI } from '@/utils/plugin/openai';
const textareaMinH = '22px';
@@ -170,19 +171,15 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
controller.current = abortSignal;
isLeavePage.current = false;
const prompt: ChatItemType[] = prompts.map((item) => ({
_id: item._id,
obj: item.obj,
value: item.value
}));
const messages = adaptChatItem_openAI({ messages: prompts, reserveId: true });
// streaming request to fetch the answer
const { newChatId, quoteLen, systemPrompt } = await streamFetch({
url: '/api/chat/chat',
const { newChatId, quoteLen } = await streamFetch({
data: {
prompt,
messages,
chatId,
modelId
appId: modelId,
model: ''
},
onMessage: (text: string) => {
setChatData((state) => ({
@@ -222,7 +219,7 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
...item,
status: 'finish',
quoteLen,
systemPrompt
systemPrompt: chatData.systemPrompt
};
})
}));
@@ -237,6 +234,7 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
[
chatId,
modelId,
chatData.systemPrompt,
setChatData,
loadHistory,
loadMyModels,
@@ -328,8 +326,8 @@ const Chat = ({ modelId, chatId }: { modelId: string; chatId: string }) => {
// delete one message
const delChatRecord = useCallback(
async (index: number, historyId: string) => {
if (!messageContextMenuData) return;
async (index: number, historyId?: string) => {
if (!messageContextMenuData || !historyId) return;
setIsLoading(true);
try {

View File

@@ -56,6 +56,7 @@ const ShareHistory = dynamic(() => import('./components/ShareHistory'), {
});
import styles from './index.module.scss';
import { adaptChatItem_openAI } from '@/utils/plugin/openai';
const textareaMinH = '22px';
@@ -170,19 +171,15 @@ const Chat = ({ shareId, historyId }: { shareId: string; historyId: string }) =>
controller.current = abortSignal;
isLeavePage.current = false;
const formatPrompts = prompts.map((item) => ({
obj: item.obj,
value: item.value
}));
const messages = adaptChatItem_openAI({ messages: prompts, reserveId: true });
// streaming request to fetch the answer
const { responseText } = await streamFetch({
url: '/api/chat/shareChat/chat',
data: {
prompts: formatPrompts.slice(-shareChatData.maxContext - 1, -1),
messages: messages.slice(-shareChatData.maxContext - 1, -1),
password,
shareId,
historyId
model: ''
},
onMessage: (text: string) => {
setShareChatData((state) => ({
@@ -226,7 +223,7 @@ const Chat = ({ shareId, historyId }: { shareId: string; historyId: string }) =>
setShareChatHistory({
historyId,
shareId,
title: formatPrompts[formatPrompts.length - 2].value,
title: prompts[prompts.length - 2].value,
latestChat: responseText,
chats: responseHistory
});
@@ -235,7 +232,7 @@ const Chat = ({ shareId, historyId }: { shareId: string; historyId: string }) =>
{
type: 'shareChatFinish',
data: {
question: formatPrompts[formatPrompts.length - 2].value,
question: prompts[prompts.length - 2].value,
answer: responseText
}
},

View File

@@ -52,7 +52,7 @@ const ChatSchema = new Schema({
},
value: {
type: String,
required: true
default: ''
},
quote: {
type: [

View File

@@ -3,15 +3,16 @@ import jwt from 'jsonwebtoken';
import Cookie from 'cookie';
import { Chat, Model, OpenApi, User, ShareChat, KB } from '../mongo';
import type { ModelSchema } from '@/types/mongoSchema';
import type { ChatItemSimpleType } from '@/types/chat';
import type { ChatItemType } from '@/types/chat';
import mongoose from 'mongoose';
import { ClaudeEnum, defaultModel, embeddingModel, EmbeddingModelType } from '@/constants/model';
import { defaultModel } from '@/constants/model';
import { formatPrice } from '@/utils/user';
import { ERROR_ENUM } from '../errorCode';
import { ChatModelType, OpenAiChatEnum } from '@/constants/model';
import { hashPassword } from '@/service/utils/tools';
export type ApiKeyType = 'training' | 'chat';
export type AuthType = 'token' | 'root' | 'apikey';
export const parseCookie = (cookie?: string): Promise<string> => {
return new Promise((resolve, reject) => {
@@ -39,13 +40,11 @@ export const parseCookie = (cookie?: string): Promise<string> => {
export const authUser = async ({
req,
authToken = false,
authOpenApi = false,
authRoot = false,
authBalance = false
}: {
req: NextApiRequest;
authToken?: boolean;
authOpenApi?: boolean;
authRoot?: boolean;
authBalance?: boolean;
}) => {
@@ -71,6 +70,36 @@ export const authUser = async ({
return Promise.reject(error);
}
};
const parseAuthorization = async (authorization?: string) => {
if (!authorization) {
return Promise.reject(ERROR_ENUM.unAuthorization);
}
// Bearer fastgpt-xxxx-appId
const auth = authorization.split(' ')[1];
if (!auth) {
return Promise.reject(ERROR_ENUM.unAuthorization);
}
const { apiKey, appId } = await (async () => {
const arr = auth.split('-');
if (arr.length !== 3) {
return Promise.reject(ERROR_ENUM.unAuthorization);
}
return {
apiKey: `${arr[0]}-${arr[1]}`,
appId: arr[2]
};
})();
// auth apiKey
const uid = await parseOpenApiKey(apiKey);
return {
uid,
appId
};
};
const parseRootKey = async (rootKey?: string, userId = '') => {
if (!rootKey || !process.env.ROOT_KEY || rootKey !== process.env.ROOT_KEY) {
return Promise.reject(ERROR_ENUM.unAuthorization);
@@ -78,31 +107,43 @@ export const authUser = async ({
return userId;
};
const { cookie, apikey, rootkey, userid } = (req.headers || {}) as {
const { cookie, apikey, rootkey, userid, authorization } = (req.headers || {}) as {
cookie?: string;
apikey?: string;
rootkey?: string;
userid?: string;
authorization?: string;
};
let uid = '';
let appId = '';
let authType: AuthType = 'token';
if (authToken) {
uid = await parseCookie(cookie);
} else if (authOpenApi) {
uid = await parseOpenApiKey(apikey);
authType = 'token';
} else if (authRoot) {
uid = await parseRootKey(rootkey, userid);
authType = 'root';
} else if (cookie) {
uid = await parseCookie(cookie);
authType = 'token';
} else if (apikey) {
uid = await parseOpenApiKey(apikey);
authType = 'apikey';
} else if (authorization) {
const authResponse = await parseAuthorization(authorization);
uid = authResponse.uid;
appId = authResponse.appId;
authType = 'apikey';
} else if (rootkey) {
uid = await parseRootKey(rootkey, userid);
authType = 'root';
} else {
return Promise.reject(ERROR_ENUM.unAuthorization);
}
// balance check
if (authBalance) {
const user = await User.findById(uid);
if (!user) {
@@ -115,7 +156,9 @@ export const authUser = async ({
}
return {
userId: uid
userId: uid,
appId,
authType
};
};
@@ -173,15 +216,15 @@ export const getApiKey = async ({
[OpenAiChatEnum.GPT432k]: {
userOpenAiKey: user.openaiKey || '',
systemAuthKey: getGpt4Key() as string
},
[ClaudeEnum.Claude]: {
userOpenAiKey: '',
systemAuthKey: process.env.CLAUDE_KEY as string
}
};
if (!keyMap[model]) {
return Promise.reject('App model does not exist');
}
// the user has their own key
if (!mustPay && keyMap[model]?.userOpenAiKey) {
if (!mustPay && keyMap[model].userOpenAiKey) {
return {
user,
userOpenAiKey: keyMap[model].userOpenAiKey,
@@ -240,7 +283,7 @@ export const authModel = async ({
return {
model,
showModelDetail: model.share.isShareDetail || userId === String(model.userId)
showModelDetail: userId === String(model.userId)
};
};
@@ -277,7 +320,7 @@ export const authChat = async ({
});
// chat content
let content: ChatItemSimpleType[] = [];
let content: ChatItemType[] = [];
if (chatId) {
// fetch the chat data
@@ -336,28 +379,9 @@ export const authShareChat = async ({
});
}
const modelId = String(shareChat.modelId);
const userId = String(shareChat.userId);
// fetch the model data
const { model, showModelDetail } = await authModel({
modelId,
userId,
authOwner: false,
reserveDetail: true
});
// fetch the user's apiKey
const { userOpenAiKey, systemAuthKey } = await getApiKey({
model: model.chat.chatModel,
userId
});
return {
userOpenAiKey,
systemAuthKey,
userId,
model,
showModelDetail
userId: String(shareChat.userId),
appId: String(shareChat.modelId),
authType: 'token' as AuthType
};
};
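parseAuthorization expects the header in the form "Bearer <openapi-key>-<appId>", where the key itself contains exactly one dash, so the token splits into three segments. A sketch with hypothetical values:

const authorization = 'Bearer fastgpt-abc123-6475e5c4b0000000000000aa';
const auth = authorization.split(' ')[1]; // 'fastgpt-abc123-6475e5c4b0000000000000aa'
const arr = auth.split('-');              // ['fastgpt', 'abc123', '6475e5c4b0000000000000aa']
const apiKey = `${arr[0]}-${arr[1]}`;     // 'fastgpt-abc123'
const appId = arr[2];                     // '6475e5c4b0000000000000aa'

This implies neither the key suffix nor the appId may contain a dash, which holds for Mongo ObjectIds.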

View File

@@ -1,35 +1,37 @@
import { ChatItemSimpleType } from '@/types/chat';
import { ChatItemType } from '@/types/chat';
import { modelToolMap } from '@/utils/plugin';
import type { ChatModelType } from '@/constants/model';
import { ChatRoleEnum } from '@/constants/chat';
import { OpenAiChatEnum, ClaudeEnum } from '@/constants/model';
import { ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat';
import { sseResponse } from '../tools';
import { OpenAiChatEnum } from '@/constants/model';
import { chatResponse, openAiStreamResponse } from './openai';
import { claudChat, claudStreamResponse } from './claude';
import type { NextApiResponse } from 'next';
import { createParser, ParsedEvent, ReconnectInterval } from 'eventsource-parser';
import { textAdaptGptResponse } from '@/utils/adapt';
export type ChatCompletionType = {
apiKey: string;
temperature: number;
messages: ChatItemSimpleType[];
messages: ChatItemType[];
chatId?: string;
[key: string]: any;
};
export type ChatCompletionResponseType = {
streamResponse: any;
responseMessages: ChatItemSimpleType[];
responseMessages: ChatItemType[];
responseText: string;
totalTokens: number;
};
export type StreamResponseType = {
chatResponse: any;
prompts: ChatItemSimpleType[];
prompts: ChatItemType[];
res: NextApiResponse;
[key: string]: any;
};
export type StreamResponseReturnType = {
responseContent: string;
totalTokens: number;
finishMessages: ChatItemSimpleType[];
finishMessages: ChatItemType[];
};
export const modelServiceToolMap: Record<
@@ -74,10 +76,6 @@ export const modelServiceToolMap: Record<
model: OpenAiChatEnum.GPT432k,
...data
})
},
[ClaudeEnum.Claude]: {
chatCompletion: claudChat,
streamResponse: claudStreamResponse
}
};
@@ -95,11 +93,11 @@ export const ChatContextFilter = ({
maxTokens
}: {
model: ChatModelType;
prompts: ChatItemSimpleType[];
prompts: ChatItemType[];
maxTokens: number;
}) => {
const systemPrompts: ChatItemSimpleType[] = [];
const chatPrompts: ChatItemSimpleType[] = [];
const systemPrompts: ChatItemType[] = [];
const chatPrompts: ChatItemType[] = [];
let rawTextLen = 0;
prompts.forEach((item) => {
@@ -107,6 +105,7 @@ export const ChatContextFilter = ({
rawTextLen += val.length;
const data = {
_id: item._id,
obj: item.obj,
value: val
};
@@ -129,7 +128,7 @@ export const ChatContextFilter = ({
});
// truncate content by token count
const chats: ChatItemSimpleType[] = [];
const chats: ChatItemType[] = [];
// take conversation content from newest to oldest
for (let i = chatPrompts.length - 1; i >= 0; i--) {
@@ -174,3 +173,89 @@ export const resStreamResponse = async ({
return { responseContent, totalTokens, finishMessages };
};
/* stream response */
export const V2_StreamResponse = async ({
model,
res,
chatResponse,
prompts
}: StreamResponseType & {
model: ChatModelType;
}) => {
let responseContent = '';
try {
const onParse = async (e: ParsedEvent | ReconnectInterval) => {
if (e.type !== 'event') return;
const data = e.data;
const { content = '' } = (() => {
try {
const json = JSON.parse(data);
const content: string = json?.choices?.[0].delta.content || '';
responseContent += content;
return { content };
} catch (error) {}
return {};
})();
if (res.closed) return;
if (data === '[DONE]') {
sseResponse({
res,
event: sseResponseEventEnum.answer,
data: textAdaptGptResponse({
text: null,
finish_reason: 'stop'
})
});
sseResponse({
res,
event: sseResponseEventEnum.answer,
data: '[DONE]'
});
} else {
sseResponse({
res,
event: sseResponseEventEnum.answer,
data: textAdaptGptResponse({
text: content
})
});
}
};
try {
const parser = createParser(onParse);
const decoder = new TextDecoder();
for await (const chunk of chatResponse.data as any) {
if (res.closed) {
break;
}
parser.feed(decoder.decode(chunk, { stream: true }));
}
} catch (error) {
console.log('pipe error', error);
}
} catch (error) {
console.log('stream error', error);
}
// count tokens
const finishMessages = prompts.concat({
obj: ChatRoleEnum.AI,
value: responseContent
});
const totalTokens = modelToolMap[model].countTokens({
messages: finishMessages
});
return {
responseContent,
totalTokens,
finishMessages
};
};

View File

@@ -28,13 +28,13 @@ export const chatResponse = async ({
maxTokens: Math.ceil(ChatModelMap[model].contextMaxToken * 0.85)
});
const adaptMessages = adaptChatItem_openAI({ messages: filterMessages });
const adaptMessages = adaptChatItem_openAI({ messages: filterMessages, reserveId: false });
const chatAPI = getOpenAIApi();
const response = await chatAPI.createChatCompletion(
{
model,
temperature: Number(temperature) || 0,
temperature: Number(temperature || 0),
messages: adaptMessages,
frequency_penalty: 0.5, // higher values reduce repeated content
presence_penalty: -0.5, // higher values make new topics more likely

View File

@@ -4,6 +4,7 @@ import crypto from 'crypto';
import jwt from 'jsonwebtoken';
import { generateQA } from '../events/generateQA';
import { generateVector } from '../events/generateVector';
import { sseResponseEventEnum } from '@/constants/chat';
/* password hashing */
export const hashPassword = (psw: string) => {
@@ -67,3 +68,16 @@ export const startQueue = () => {
generateVector();
}
};
export const sseResponse = ({
res,
event,
data
}: {
res: NextApiResponse;
event?: `${sseResponseEventEnum}`;
data: string;
}) => {
event && res.write(`event: ${event}\n`);
res.write(`data: ${data}\n\n`);
};

View File

@@ -44,7 +44,7 @@ type State = {
delShareChatHistory: (shareId?: string) => void;
};
const defaultChatData = {
const defaultChatData: ChatType = {
chatId: 'chatId',
modelId: 'modelId',
model: {

View File

@@ -4,16 +4,14 @@ import { QuoteItemType } from '@/pages/api/openapi/kb/appKbSearch';
export type ExportChatType = 'md' | 'pdf' | 'html';
export type ChatItemSimpleType = {
export type ChatItemType = {
_id?: string;
obj: `${ChatRoleEnum}`;
value: string;
quoteLen?: number;
quote?: QuoteItemType[];
systemPrompt?: string;
};
export type ChatItemType = {
_id: string;
} & ChatItemSimpleType;
export type ChatSiteItemType = {
status: 'loading' | 'finish';

View File

@@ -2,6 +2,10 @@ import { formatPrice } from './user';
import dayjs from 'dayjs';
import type { BillSchema } from '../types/mongoSchema';
import type { UserBillType } from '@/types/user';
import { ChatItemType } from '@/types/chat';
import { ChatCompletionRequestMessageRoleEnum } from 'openai';
import { ChatRoleEnum } from '@/constants/chat';
import type { MessageItemType } from '@/pages/api/openapi/v1/chat/completions';
export const adaptBill = (bill: BillSchema): UserBillType => {
return {
@@ -14,3 +18,37 @@ export const adaptBill = (bill: BillSchema): UserBillType => {
price: formatPrice(bill.price)
};
};
export const gptMessage2ChatType = (messages: MessageItemType[]): ChatItemType[] => {
const roleMap = {
[ChatCompletionRequestMessageRoleEnum.Assistant]: ChatRoleEnum.AI,
[ChatCompletionRequestMessageRoleEnum.User]: ChatRoleEnum.Human,
[ChatCompletionRequestMessageRoleEnum.System]: ChatRoleEnum.System
};
return messages.map((item) => ({
_id: item._id,
obj: roleMap[item.role],
value: item.content
}));
};
export const textAdaptGptResponse = ({
text,
model,
finish_reason = null,
extraData = {}
}: {
model?: string;
text: string | null;
finish_reason?: null | 'stop';
extraData?: Object;
}) => {
return JSON.stringify({
...extraData,
id: '',
object: '',
created: 0,
model,
choices: [{ delta: text === null ? {} : { content: text }, index: 0, finish_reason }]
});
};
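textAdaptGptResponse wraps a text delta in the OpenAI streaming-chunk shape so clients can reuse standard SSE parsing. Two sample outputs, derived directly from the implementation above:

textAdaptGptResponse({ text: 'Hi', model: 'gpt-3.5-turbo' });
// '{"id":"","object":"","created":0,"model":"gpt-3.5-turbo","choices":[{"delta":{"content":"Hi"},"index":0,"finish_reason":null}]}'

textAdaptGptResponse({ text: null, finish_reason: 'stop' });
// '{"id":"","object":"","created":0,"choices":[{"delta":{},"index":0,"finish_reason":"stop"}]}'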

View File

@@ -1,18 +1,15 @@
import { ClaudeEnum, OpenAiChatEnum } from '@/constants/model';
import { OpenAiChatEnum } from '@/constants/model';
import type { ChatModelType } from '@/constants/model';
import type { ChatItemSimpleType } from '@/types/chat';
import type { ChatItemType } from '@/types/chat';
import { countOpenAIToken, openAiSliceTextByToken } from './openai';
import { gpt_chatItemTokenSlice } from '@/pages/api/openapi/text/gptMessagesSlice';
export const modelToolMap: Record<
ChatModelType,
{
countTokens: (data: { messages: ChatItemSimpleType[] }) => number;
countTokens: (data: { messages: ChatItemType[] }) => number;
sliceText: (data: { text: string; length: number }) => string;
tokenSlice: (data: {
messages: ChatItemSimpleType[];
maxToken: number;
}) => ChatItemSimpleType[];
tokenSlice: (data: { messages: ChatItemType[]; maxToken: number }) => ChatItemType[];
}
> = {
[OpenAiChatEnum.GPT35]: {
@@ -34,10 +31,5 @@ export const modelToolMap: Record<
countTokens: ({ messages }) => countOpenAIToken({ model: OpenAiChatEnum.GPT432k, messages }),
sliceText: (data) => openAiSliceTextByToken({ model: OpenAiChatEnum.GPT432k, ...data }),
tokenSlice: (data) => gpt_chatItemTokenSlice({ model: OpenAiChatEnum.GPT432k, ...data })
},
[ClaudeEnum.Claude]: {
countTokens: ({ messages }) => countOpenAIToken({ model: OpenAiChatEnum.GPT35, messages }),
sliceText: (data) => openAiSliceTextByToken({ model: OpenAiChatEnum.GPT35, ...data }),
tokenSlice: (data) => gpt_chatItemTokenSlice({ model: OpenAiChatEnum.GPT35, ...data })
}
};

View File

@@ -1,11 +1,12 @@
import { encoding_for_model, type Tiktoken } from '@dqbd/tiktoken';
import type { ChatItemSimpleType } from '@/types/chat';
import type { ChatItemType } from '@/types/chat';
import { ChatRoleEnum } from '@/constants/chat';
import { ChatCompletionRequestMessage, ChatCompletionRequestMessageRoleEnum } from 'openai';
import { ChatCompletionRequestMessageRoleEnum } from 'openai';
import { OpenAiChatEnum } from '@/constants/model';
import Graphemer from 'graphemer';
import axios from 'axios';
import dayjs from 'dayjs';
import type { MessageItemType } from '@/pages/api/openapi/v1/chat/completions';
const textDecoder = new TextDecoder();
const graphemer = new Graphemer();
@@ -86,16 +87,19 @@ export const getOpenAiEncMap = () => {
};
export const adaptChatItem_openAI = ({
messages
messages,
reserveId
}: {
messages: ChatItemSimpleType[];
}): ChatCompletionRequestMessage[] => {
messages: ChatItemType[];
reserveId: boolean;
}): MessageItemType[] => {
const map = {
[ChatRoleEnum.AI]: ChatCompletionRequestMessageRoleEnum.Assistant,
[ChatRoleEnum.Human]: ChatCompletionRequestMessageRoleEnum.User,
[ChatRoleEnum.System]: ChatCompletionRequestMessageRoleEnum.System
};
return messages.map((item) => ({
...(reserveId && { _id: item._id }),
role: map[item.obj] || ChatCompletionRequestMessageRoleEnum.System,
content: item.value || ''
}));
@@ -105,7 +109,7 @@ export function countOpenAIToken({
messages,
model
}: {
messages: ChatItemSimpleType[];
messages: ChatItemType[];
model: `${OpenAiChatEnum}`;
}) {
function getChatGPTEncodingText(
@@ -158,7 +162,7 @@ export function countOpenAIToken({
return segments.reduce((memo, i) => memo + i.tokens.length, 0) ?? 0;
}
const adaptMessages = adaptChatItem_openAI({ messages });
const adaptMessages = adaptChatItem_openAI({ messages, reserveId: true });
return text2TokensLen(getOpenAiEncMap()[model], getChatGPTEncodingText(adaptMessages, model));
}
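With reserveId: true, the adapter keeps the Mongo _id alongside the OpenAI role mapping (AI -> assistant, Human -> user, System -> system), producing the MessageItemType used by the v2 route. A small sketch with a placeholder id:

const adapted = adaptChatItem_openAI({
  messages: [{ _id: '6475e5c4b0000000000000aa', obj: ChatRoleEnum.Human, value: 'hello' }],
  reserveId: true
});
// => [{ _id: '6475e5c4b0000000000000aa', role: 'user', content: 'hello' }]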