perf: keys

archer
2023-06-23 17:12:52 +08:00
parent ae1f7a888e
commit 41ada6ecda
18 changed files with 49 additions and 93 deletions

View File

@@ -96,8 +96,7 @@ export async function appKbSearch({
   // get vector
   const promptVector = await openaiEmbedding({
     userId,
-    input: [prompt.value],
-    type: 'chat'
+    input: [prompt.value]
   });
   // search kb

View File

@@ -29,8 +29,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
   const vector = await openaiEmbedding({
     userId,
-    input: [text],
-    type: 'training'
+    input: [text]
   });
   const response: any = await PgClient.query(

View File

@@ -21,8 +21,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
   if (q) {
     return openaiEmbedding({
       userId,
-      input: [q],
-      type: 'chat'
+      input: [q]
     });
   }
   return [];

View File

@@ -6,26 +6,24 @@ import { getOpenAIApi } from '@/service/utils/chat/openai';
 import { embeddingModel } from '@/constants/model';
 import { axiosConfig } from '@/service/utils/tools';
 import { pushGenerateVectorBill } from '@/service/events/pushBill';
-import { ApiKeyType } from '@/service/utils/auth';
 import { OpenAiChatEnum } from '@/constants/model';
 
 type Props = {
   input: string[];
-  type?: ApiKeyType;
 };
 type Response = number[][];
 
 export default withNextCors(async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
   try {
     const { userId } = await authUser({ req });
-    let { input, type } = req.query as Props;
+    let { input } = req.query as Props;
 
     if (!Array.isArray(input)) {
       throw new Error('缺少参数');
     }
 
     jsonRes<Response>(res, {
-      data: await openaiEmbedding({ userId, input, type, mustPay: true })
+      data: await openaiEmbedding({ userId, input, mustPay: true })
     });
   } catch (err) {
     console.log(err);
@@ -39,14 +37,12 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
 export async function openaiEmbedding({
   userId,
   input,
-  mustPay = false,
-  type = 'chat'
+  mustPay = false
 }: { userId: string; mustPay?: boolean } & Props) {
   const { userOpenAiKey, systemAuthKey } = await getApiKey({
     model: OpenAiChatEnum.GPT35,
     userId,
-    mustPay,
-    type
+    mustPay
   });
 
   // 获取 chatAPI
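
Below is a minimal sketch (not the repository's actual implementation, and with placeholder names) of the call shape this commit leaves behind: `openaiEmbedding` now takes only `userId`, `input`, and an optional `mustPay`, with no per-call 'chat' / 'training' key type.

```ts
// Sketch only: a stub mirroring the narrowed Props after `type` is dropped.
type EmbeddingProps = {
  input: string[];
};

async function openaiEmbedding({
  userId,
  input,
  mustPay = false
}: { userId: string; mustPay?: boolean } & EmbeddingProps): Promise<number[][]> {
  // Key resolution no longer branches on an ApiKeyType; one shared path
  // serves chat, training and plugin callers alike.
  console.log(`embedding ${input.length} item(s) for user ${userId}, mustPay=${mustPay}`);
  return input.map(() => []); // the real helper returns OpenAI embedding vectors
}

// Callers simply omit the old `type` field:
void openaiEmbedding({ userId: 'demo-user', input: ['hello world'] });
```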

View File

@@ -33,7 +33,7 @@ export async function sensitiveCheck({ input }: Props) {
   }
 
   const response = await axios({
-    ...axiosConfig(getSystemOpenAiKey('chat')),
+    ...axiosConfig(getSystemOpenAiKey()),
     method: 'POST',
     url: `/moderations`,
     data: {
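
For reference, a rough sketch of what this moderation call reduces to once a single system key is used. The endpoint shape follows OpenAI's public /v1/moderations API; the `OPENAIKEY` variable and the standalone helper below are assumptions, not the repo's exact code.

```ts
import axios from 'axios';

// Sketch: sensitive-content check via OpenAI's moderation endpoint,
// authorised with the one system-wide key (no 'chat'/'training' variants).
async function sensitiveCheck(input: string): Promise<void> {
  const apiKey = process.env.OPENAIKEY; // assumed single key after this commit
  const { data } = await axios({
    method: 'POST',
    baseURL: 'https://api.openai.com/v1',
    url: '/moderations',
    headers: { Authorization: `Bearer ${apiKey}` },
    data: { input }
  });
  if (data?.results?.[0]?.flagged) {
    throw new Error('Content flagged by moderation');
  }
}
```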

View File

@@ -7,11 +7,9 @@ import { ChatModelMap, OpenAiChatEnum } from '@/constants/model';
 export default async function handler(req: NextApiRequest, res: NextApiResponse) {
   const chatModelList: ChatModelItemType[] = [];
 
-  if (global.systemEnv.openAIKeys) {
+  if (process.env.OPENAIKEY) {
     chatModelList.push(ChatModelMap[OpenAiChatEnum.GPT3516k]);
     chatModelList.push(ChatModelMap[OpenAiChatEnum.GPT35]);
-  }
-  if (global.systemEnv.gpt4Key) {
     chatModelList.push(ChatModelMap[OpenAiChatEnum.GPT4]);
   }
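
A minimal sketch of the gating this hunk suggests: a single `OPENAIKEY` environment variable decides whether the OpenAI chat models are advertised at all. Enum values and display names below are placeholders, not the repo's real `ChatModelMap`.

```ts
// Sketch: one env key gates the whole OpenAI model list,
// replacing the separate global.systemEnv.openAIKeys / gpt4Key settings.
enum OpenAiChatEnum {
  GPT35 = 'gpt-3.5-turbo',
  GPT3516k = 'gpt-3.5-turbo-16k',
  GPT4 = 'gpt-4'
}

type ChatModelItemType = { chatModel: string; name: string };

const ChatModelMap: Record<OpenAiChatEnum, ChatModelItemType> = {
  [OpenAiChatEnum.GPT35]: { chatModel: OpenAiChatEnum.GPT35, name: 'ChatGPT' },
  [OpenAiChatEnum.GPT3516k]: { chatModel: OpenAiChatEnum.GPT3516k, name: 'ChatGPT 16k' },
  [OpenAiChatEnum.GPT4]: { chatModel: OpenAiChatEnum.GPT4, name: 'GPT-4' }
};

const chatModelList: ChatModelItemType[] = [];
if (process.env.OPENAIKEY) {
  chatModelList.push(ChatModelMap[OpenAiChatEnum.GPT3516k]);
  chatModelList.push(ChatModelMap[OpenAiChatEnum.GPT35]);
  chatModelList.push(ChatModelMap[OpenAiChatEnum.GPT4]);
}
```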

View File

@@ -256,7 +256,7 @@ const SelectFileModal = ({
         )}
         {/* chunk size */}
         {mode === TrainingModeEnum.index && (
-          <Flex w={'100%'} px={5} alignItems={'center'} mt={4}>
+          <Flex mt={5}>
             <Box w={['70px']} flexShrink={0}>
             </Box>