perf: keys — consolidate OpenAI API keys into a single OPENAIKEY, removing the chat/training key split and the separate GPT-4 key

This commit is contained in:
archer
2023-06-23 17:12:52 +08:00
parent ae1f7a888e
commit 41ada6ecda
18 changed files with 49 additions and 93 deletions

View File

@@ -96,8 +96,7 @@ export async function appKbSearch({
// get vector
const promptVector = await openaiEmbedding({
userId,
input: [prompt.value],
type: 'chat'
input: [prompt.value]
});
// search kb

View File

@@ -29,8 +29,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
const vector = await openaiEmbedding({
userId,
input: [text],
type: 'training'
input: [text]
});
const response: any = await PgClient.query(

View File

@@ -21,8 +21,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
if (q) {
return openaiEmbedding({
userId,
input: [q],
type: 'chat'
input: [q]
});
}
return [];

View File

@@ -6,26 +6,24 @@ import { getOpenAIApi } from '@/service/utils/chat/openai';
import { embeddingModel } from '@/constants/model';
import { axiosConfig } from '@/service/utils/tools';
import { pushGenerateVectorBill } from '@/service/events/pushBill';
import { ApiKeyType } from '@/service/utils/auth';
import { OpenAiChatEnum } from '@/constants/model';
type Props = {
input: string[];
type?: ApiKeyType;
};
type Response = number[][];
export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
const { userId } = await authUser({ req });
let { input, type } = req.query as Props;
let { input } = req.query as Props;
if (!Array.isArray(input)) {
throw new Error('缺少参数');
}
jsonRes<Response>(res, {
data: await openaiEmbedding({ userId, input, type, mustPay: true })
data: await openaiEmbedding({ userId, input, mustPay: true })
});
} catch (err) {
console.log(err);
@@ -39,14 +37,12 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
export async function openaiEmbedding({
userId,
input,
mustPay = false,
type = 'chat'
mustPay = false
}: { userId: string; mustPay?: boolean } & Props) {
const { userOpenAiKey, systemAuthKey } = await getApiKey({
model: OpenAiChatEnum.GPT35,
userId,
mustPay,
type
mustPay
});
// 获取 chatAPI

View File

@@ -33,7 +33,7 @@ export async function sensitiveCheck({ input }: Props) {
}
const response = await axios({
...axiosConfig(getSystemOpenAiKey('chat')),
...axiosConfig(getSystemOpenAiKey()),
method: 'POST',
url: `/moderations`,
data: {

View File

@@ -7,11 +7,9 @@ import { ChatModelMap, OpenAiChatEnum } from '@/constants/model';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
const chatModelList: ChatModelItemType[] = [];
if (global.systemEnv.openAIKeys) {
if (process.env.OPENAIKEY) {
chatModelList.push(ChatModelMap[OpenAiChatEnum.GPT3516k]);
chatModelList.push(ChatModelMap[OpenAiChatEnum.GPT35]);
}
if (global.systemEnv.gpt4Key) {
chatModelList.push(ChatModelMap[OpenAiChatEnum.GPT4]);
}

View File

@@ -256,7 +256,7 @@ const SelectFileModal = ({
)}
{/* chunk size */}
{mode === TrainingModeEnum.index && (
<Flex w={'100%'} px={5} alignItems={'center'} mt={4}>
<Flex mt={5}>
<Box w={['70px']} flexShrink={0}>
</Box>

View File

@@ -55,7 +55,6 @@ export async function generateQA(): Promise<any> {
const { systemAuthKey } = await getApiKey({
model: OpenAiChatEnum.GPT35,
userId,
type: 'training',
mustPay: true
});

View File

@@ -58,7 +58,6 @@ export async function generateVector(): Promise<any> {
const vectors = await openaiEmbedding({
input: dataItems.map((item) => item.q),
userId,
type: 'training',
mustPay: true
});

View File

@@ -1,18 +1,6 @@
import { Schema, model, models } from 'mongoose';
const SystemSchema = new Schema({
openAIKeys: {
type: String,
default: ''
},
openAITrainingKeys: {
type: String,
default: ''
},
gpt4Key: {
type: String,
default: ''
},
vectorMaxProcess: {
type: Number,
default: 10

View File

@@ -15,9 +15,6 @@ export async function connectToDatabase(): Promise<void> {
global.qaQueueLen = 0;
global.vectorQueueLen = 0;
global.systemEnv = {
openAIKeys: process.env.OPENAIKEY || '',
openAITrainingKeys: process.env.OPENAI_TRAINING_KEY || '',
gpt4Key: process.env.GPT4KEY || '',
vectorMaxProcess: 10,
qaMaxProcess: 10,
pgIvfflatProbe: 10,

View File

@@ -11,7 +11,6 @@ import { ERROR_ENUM } from '../errorCode';
import { ChatModelType, OpenAiChatEnum } from '@/constants/model';
import { hashPassword } from '@/service/utils/tools';
export type ApiKeyType = 'training' | 'chat';
export type AuthType = 'token' | 'root' | 'apikey';
export const parseCookie = (cookie?: string): Promise<string> => {
@@ -163,37 +162,19 @@ export const authUser = async ({
};
/* random get openai api key */
export const getSystemOpenAiKey = (type: ApiKeyType) => {
const keys = (() => {
if (type === 'training') {
return global.systemEnv.openAITrainingKeys?.split(',') || [];
}
return global.systemEnv.openAIKeys?.split(',') || [];
})();
// 纯字符串类型
const i = Math.floor(Math.random() * keys.length);
return keys[i] || (global.systemEnv.openAIKeys as string);
};
export const getGpt4Key = () => {
const keys = global.systemEnv.gpt4Key?.split(',') || [];
// 纯字符串类型
const i = Math.floor(Math.random() * keys.length);
return keys[i] || (global.systemEnv.openAIKeys as string);
export const getSystemOpenAiKey = () => {
return process.env.OPENAIKEY || '';
};
/* 获取 api 请求的 key */
export const getApiKey = async ({
model,
userId,
mustPay = false,
type = 'chat'
mustPay = false
}: {
model: ChatModelType;
userId: string;
mustPay?: boolean;
type?: ApiKeyType;
}) => {
const user = await User.findById(userId);
if (!user) {
@@ -203,19 +184,19 @@ export const getApiKey = async ({
const keyMap = {
[OpenAiChatEnum.GPT35]: {
userOpenAiKey: user.openaiKey || '',
systemAuthKey: getSystemOpenAiKey(type) as string
systemAuthKey: getSystemOpenAiKey()
},
[OpenAiChatEnum.GPT3516k]: {
userOpenAiKey: user.openaiKey || '',
systemAuthKey: getSystemOpenAiKey(type) as string
systemAuthKey: getSystemOpenAiKey()
},
[OpenAiChatEnum.GPT4]: {
userOpenAiKey: user.openaiKey || '',
systemAuthKey: getGpt4Key() as string
systemAuthKey: getSystemOpenAiKey()
},
[OpenAiChatEnum.GPT432k]: {
userOpenAiKey: user.openaiKey || '',
systemAuthKey: getGpt4Key() as string
systemAuthKey: getSystemOpenAiKey()
}
};

View File

@@ -23,9 +23,6 @@ declare global {
var vectorQueueLen: number;
var OpenAiEncMap: Record<string, Tiktoken>;
var systemEnv: {
openAIKeys: string;
openAITrainingKeys: string;
gpt4Key: string;
vectorMaxProcess: number;
qaMaxProcess: number;
pgIvfflatProbe: number;