feat: openapi v2 chat

This commit is contained in:
archer
2023-06-18 19:59:37 +08:00
parent 2b93ae2d00
commit ee9afa310a
27 changed files with 790 additions and 585 deletions

View File

@@ -52,7 +52,7 @@ const ChatSchema = new Schema({
},
value: {
type: String,
required: true
default: ''
},
quote: {
type: [

View File

@@ -3,15 +3,16 @@ import jwt from 'jsonwebtoken';
import Cookie from 'cookie';
import { Chat, Model, OpenApi, User, ShareChat, KB } from '../mongo';
import type { ModelSchema } from '@/types/mongoSchema';
import type { ChatItemSimpleType } from '@/types/chat';
import type { ChatItemType } from '@/types/chat';
import mongoose from 'mongoose';
import { ClaudeEnum, defaultModel, embeddingModel, EmbeddingModelType } from '@/constants/model';
import { defaultModel } from '@/constants/model';
import { formatPrice } from '@/utils/user';
import { ERROR_ENUM } from '../errorCode';
import { ChatModelType, OpenAiChatEnum } from '@/constants/model';
import { hashPassword } from '@/service/utils/tools';
export type ApiKeyType = 'training' | 'chat';
export type AuthType = 'token' | 'root' | 'apikey';
export const parseCookie = (cookie?: string): Promise<string> => {
return new Promise((resolve, reject) => {
@@ -39,13 +40,11 @@ export const parseCookie = (cookie?: string): Promise<string> => {
export const authUser = async ({
req,
authToken = false,
authOpenApi = false,
authRoot = false,
authBalance = false
}: {
req: NextApiRequest;
authToken?: boolean;
authOpenApi?: boolean;
authRoot?: boolean;
authBalance?: boolean;
}) => {
@@ -71,6 +70,36 @@ export const authUser = async ({
return Promise.reject(error);
}
};
/**
 * Parse an `Authorization` header of the form `Bearer fastgpt-xxxx-appId`
 * into the owning user id and the target app id.
 * Rejects with ERROR_ENUM.unAuthorization on any malformed input.
 */
const parseAuthorization = async (authorization?: string) => {
  if (!authorization) {
    return Promise.reject(ERROR_ENUM.unAuthorization);
  }

  // Strip the "Bearer " scheme prefix; expected value: fastgpt-xxxx-appId
  const token = authorization.split(' ')[1];
  if (!token) {
    return Promise.reject(ERROR_ENUM.unAuthorization);
  }

  // The token must have exactly three dash-separated parts:
  // the first two form the api key, the third is the app id.
  const parts = token.split('-');
  if (parts.length !== 3) {
    return Promise.reject(ERROR_ENUM.unAuthorization);
  }
  const apiKey = `${parts[0]}-${parts[1]}`;
  const appId = parts[2];

  // Validate the api key and resolve the user it belongs to.
  const uid = await parseOpenApiKey(apiKey);

  return {
    uid,
    appId
  };
};
const parseRootKey = async (rootKey?: string, userId = '') => {
if (!rootKey || !process.env.ROOT_KEY || rootKey !== process.env.ROOT_KEY) {
return Promise.reject(ERROR_ENUM.unAuthorization);
@@ -78,31 +107,43 @@ export const authUser = async ({
return userId;
};
const { cookie, apikey, rootkey, userid } = (req.headers || {}) as {
const { cookie, apikey, rootkey, userid, authorization } = (req.headers || {}) as {
cookie?: string;
apikey?: string;
rootkey?: string;
userid?: string;
authorization?: string;
};
let uid = '';
let appId = '';
let authType: AuthType = 'token';
if (authToken) {
uid = await parseCookie(cookie);
} else if (authOpenApi) {
uid = await parseOpenApiKey(apikey);
authType = 'token';
} else if (authRoot) {
uid = await parseRootKey(rootkey, userid);
authType = 'root';
} else if (cookie) {
uid = await parseCookie(cookie);
authType = 'token';
} else if (apikey) {
uid = await parseOpenApiKey(apikey);
authType = 'apikey';
} else if (authorization) {
const authResponse = await parseAuthorization(authorization);
uid = authResponse.uid;
appId = authResponse.appId;
authType = 'apikey';
} else if (rootkey) {
uid = await parseRootKey(rootkey, userid);
authType = 'root';
} else {
return Promise.reject(ERROR_ENUM.unAuthorization);
}
// balance check
if (authBalance) {
const user = await User.findById(uid);
if (!user) {
@@ -115,7 +156,9 @@ export const authUser = async ({
}
return {
userId: uid
userId: uid,
appId,
authType
};
};
@@ -173,15 +216,15 @@ export const getApiKey = async ({
[OpenAiChatEnum.GPT432k]: {
userOpenAiKey: user.openaiKey || '',
systemAuthKey: getGpt4Key() as string
},
[ClaudeEnum.Claude]: {
userOpenAiKey: '',
systemAuthKey: process.env.CLAUDE_KEY as string
}
};
if (!keyMap[model]) {
return Promise.reject('App model is exists');
}
// 有自己的key
if (!mustPay && keyMap[model]?.userOpenAiKey) {
if (!mustPay && keyMap[model].userOpenAiKey) {
return {
user,
userOpenAiKey: keyMap[model].userOpenAiKey,
@@ -240,7 +283,7 @@ export const authModel = async ({
return {
model,
showModelDetail: model.share.isShareDetail || userId === String(model.userId)
showModelDetail: userId === String(model.userId)
};
};
@@ -277,7 +320,7 @@ export const authChat = async ({
});
// 聊天内容
let content: ChatItemSimpleType[] = [];
let content: ChatItemType[] = [];
if (chatId) {
// 获取 chat 数据
@@ -336,28 +379,9 @@ export const authShareChat = async ({
});
}
const modelId = String(shareChat.modelId);
const userId = String(shareChat.userId);
// 获取 model 数据
const { model, showModelDetail } = await authModel({
modelId,
userId,
authOwner: false,
reserveDetail: true
});
// 获取 user 的 apiKey
const { userOpenAiKey, systemAuthKey } = await getApiKey({
model: model.chat.chatModel,
userId
});
return {
userOpenAiKey,
systemAuthKey,
userId,
model,
showModelDetail
userId: String(shareChat.userId),
appId: String(shareChat.modelId),
authType: 'token' as AuthType
};
};

View File

@@ -1,35 +1,37 @@
import { ChatItemSimpleType } from '@/types/chat';
import { ChatItemType } from '@/types/chat';
import { modelToolMap } from '@/utils/plugin';
import type { ChatModelType } from '@/constants/model';
import { ChatRoleEnum } from '@/constants/chat';
import { OpenAiChatEnum, ClaudeEnum } from '@/constants/model';
import { ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat';
import { sseResponse } from '../tools';
import { OpenAiChatEnum } from '@/constants/model';
import { chatResponse, openAiStreamResponse } from './openai';
import { claudChat, claudStreamResponse } from './claude';
import type { NextApiResponse } from 'next';
import { createParser, ParsedEvent, ReconnectInterval } from 'eventsource-parser';
import { textAdaptGptResponse } from '@/utils/adapt';
export type ChatCompletionType = {
apiKey: string;
temperature: number;
messages: ChatItemSimpleType[];
messages: ChatItemType[];
chatId?: string;
[key: string]: any;
};
export type ChatCompletionResponseType = {
streamResponse: any;
responseMessages: ChatItemSimpleType[];
responseMessages: ChatItemType[];
responseText: string;
totalTokens: number;
};
export type StreamResponseType = {
chatResponse: any;
prompts: ChatItemSimpleType[];
prompts: ChatItemType[];
res: NextApiResponse;
[key: string]: any;
};
export type StreamResponseReturnType = {
responseContent: string;
totalTokens: number;
finishMessages: ChatItemSimpleType[];
finishMessages: ChatItemType[];
};
export const modelServiceToolMap: Record<
@@ -74,10 +76,6 @@ export const modelServiceToolMap: Record<
model: OpenAiChatEnum.GPT432k,
...data
})
},
[ClaudeEnum.Claude]: {
chatCompletion: claudChat,
streamResponse: claudStreamResponse
}
};
@@ -95,11 +93,11 @@ export const ChatContextFilter = ({
maxTokens
}: {
model: ChatModelType;
prompts: ChatItemSimpleType[];
prompts: ChatItemType[];
maxTokens: number;
}) => {
const systemPrompts: ChatItemSimpleType[] = [];
const chatPrompts: ChatItemSimpleType[] = [];
const systemPrompts: ChatItemType[] = [];
const chatPrompts: ChatItemType[] = [];
let rawTextLen = 0;
prompts.forEach((item) => {
@@ -107,6 +105,7 @@ export const ChatContextFilter = ({
rawTextLen += val.length;
const data = {
_id: item._id,
obj: item.obj,
value: val
};
@@ -129,7 +128,7 @@ export const ChatContextFilter = ({
});
// 根据 tokens 截断内容
const chats: ChatItemSimpleType[] = [];
const chats: ChatItemType[] = [];
// 从后往前截取对话内容
for (let i = chatPrompts.length - 1; i >= 0; i--) {
@@ -174,3 +173,89 @@ export const resStreamResponse = async ({
return { responseContent, totalTokens, finishMessages };
};
/* stream response */
/**
 * V2 stream relay: pipe an upstream OpenAI-style SSE stream (`chatResponse.data`)
 * back to the client, re-emitting each delta as an `answer` SSE event formatted
 * via `textAdaptGptResponse`, then compute token usage for the finished dialog.
 *
 * Returns { responseContent, totalTokens, finishMessages }.
 * NOTE(review): assumes `chatResponse.data` is an async-iterable of byte chunks
 * (axios responseType "stream") — confirm at the call site.
 */
export const V2_StreamResponse = async ({
model,
res,
chatResponse,
prompts
}: StreamResponseType & {
model: ChatModelType;
}) => {
// accumulated assistant answer text across all deltas
let responseContent = '';
try {
const onParse = async (e: ParsedEvent | ReconnectInterval) => {
if (e.type !== 'event') return;
const data = e.data;
// Extract the delta text; malformed/non-JSON payloads (e.g. "[DONE]")
// yield an empty content and are swallowed deliberately.
const { content = '' } = (() => {
try {
const json = JSON.parse(data);
const content: string = json?.choices?.[0].delta.content || '';
responseContent += content;
return { content };
} catch (error) {}
return {};
})();
// client disconnected — stop writing
if (res.closed) return;
if (data === '[DONE]') {
// terminal frame: send a finish_reason:"stop" chunk, then the SSE "[DONE]" sentinel
sseResponse({
res,
event: sseResponseEventEnum.answer,
data: textAdaptGptResponse({
text: null,
finish_reason: 'stop'
})
});
sseResponse({
res,
event: sseResponseEventEnum.answer,
data: '[DONE]'
});
} else {
// ordinary delta: forward the text wrapped in GPT-compatible chunk format
sseResponse({
res,
event: sseResponseEventEnum.answer,
data: textAdaptGptResponse({
text: content
})
});
}
};
try {
const parser = createParser(onParse);
const decoder = new TextDecoder();
// feed raw upstream bytes into the SSE parser until the stream ends
// or the client closes the connection
for await (const chunk of chatResponse.data as any) {
if (res.closed) {
break;
}
parser.feed(decoder.decode(chunk, { stream: true }));
}
} catch (error) {
console.log('pipe error', error);
}
} catch (error) {
console.log('stream error', error);
}
// count tokens: append the assistant's full answer to the prompt history
// and measure the whole conversation with the model-specific counter
const finishMessages = prompts.concat({
obj: ChatRoleEnum.AI,
value: responseContent
});
const totalTokens = modelToolMap[model].countTokens({
messages: finishMessages
});
return {
responseContent,
totalTokens,
finishMessages
};
};

View File

@@ -28,13 +28,13 @@ export const chatResponse = async ({
maxTokens: Math.ceil(ChatModelMap[model].contextMaxToken * 0.85)
});
const adaptMessages = adaptChatItem_openAI({ messages: filterMessages });
const adaptMessages = adaptChatItem_openAI({ messages: filterMessages, reserveId: false });
const chatAPI = getOpenAIApi();
const response = await chatAPI.createChatCompletion(
{
model,
temperature: Number(temperature) || 0,
temperature: Number(temperature || 0),
messages: adaptMessages,
frequency_penalty: 0.5, // 越大,重复内容越少
presence_penalty: -0.5, // 越大,越容易出现新内容

View File

@@ -4,6 +4,7 @@ import crypto from 'crypto';
import jwt from 'jsonwebtoken';
import { generateQA } from '../events/generateQA';
import { generateVector } from '../events/generateVector';
import { sseResponseEventEnum } from '@/constants/chat';
/* 密码加密 */
export const hashPassword = (psw: string) => {
@@ -67,3 +68,16 @@ export const startQueue = () => {
generateVector();
}
};
/**
 * Write a single Server-Sent Events message to the response stream.
 * Emits an optional `event:` field, then the `data:` field followed by the
 * blank line that terminates an SSE message.
 */
export const sseResponse = ({
  res,
  event,
  data
}: {
  res: NextApiResponse;
  event?: `${sseResponseEventEnum}`;
  data: string;
}) => {
  if (event) {
    res.write(`event: ${event}\n`);
  }
  res.write(`data: ${data}\n\n`);
};