feat: user openai account

archer
2023-07-28 13:29:06 +08:00
parent dfda5285bd
commit fb8635a951
9 changed files with 75 additions and 39 deletions

View File

@@ -32,7 +32,8 @@ const OpenAIAccountModal = ({
<MyModal isOpen onClose={onClose} title={t('user.OpenAI Account Setting')}>
<ModalBody>
<Box fontSize={'sm'} color={'myGray.500'}>
API
Openai Chat API
</Box>
<Flex alignItems={'center'} mt={5}>
<Box flex={'0 0 65px'}>API Key:</Box>

View File

@@ -42,13 +42,14 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
await connectToDatabase();
/* user auth */
const { userId } = await authUser({ req });
const { userId, user } = await authUser({ req, authBalance: true });
/* start process */
const { responseData } = await dispatchModules({
res,
modules: modules,
variables,
user,
params: {
history: gptMessage2ChatType(history),
userChatInput: prompt

View File

@@ -24,6 +24,8 @@ import { AppModuleItemType, RunningModuleItemType } from '@/types/app';
import { pushTaskBill } from '@/service/events/pushBill';
import { BillSourceEnum } from '@/constants/user';
import { ChatHistoryItemResType } from '@/types/chat';
import { UserModelSchema } from '@/types/mongoSchema';
import { getAIChatApi } from '@/service/ai/openai';
export type MessageItemType = ChatCompletionRequestMessage & { _id?: string };
type FastGptWebChatProps = {
@@ -69,6 +71,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
/* user auth */
const {
user,
userId,
appId: authAppid,
authType
@@ -76,7 +79,14 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
? authShareChat({
shareId
})
: authUser({ req }));
: authUser({ req, authBalance: true }));
if (!user) {
throw new Error('Account is error');
}
if (authType !== 'token') {
user.openaiAccount = undefined;
}
appId = appId ? appId : authAppid;
if (!appId) {
@@ -108,6 +118,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
const { responseData, answerText } = await dispatchModules({
res,
modules: app.modules,
user,
variables,
params: {
history: prompts,
@@ -182,7 +193,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
responseData,
id: chatId || '',
model: '',
usage: { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 },
usage: { prompt_tokens: 1, completion_tokens: 1, total_tokens: 1 },
choices: [
{
message: [{ role: 'assistant', content: answerText }],
@@ -217,12 +228,14 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
export async function dispatchModules({
res,
modules,
user,
params = {},
variables = {},
stream = false
}: {
res: NextApiResponse;
modules: AppModuleItemType[];
user?: UserModelSchema;
params?: Record<string, any>;
variables?: Record<string, any>;
stream?: boolean;
@@ -304,6 +317,7 @@ export async function dispatchModules({
const props: Record<string, any> = {
res,
stream,
userOpenaiAccount: user?.openaiAccount,
...params
};

View File

@@ -5,12 +5,12 @@ import { User } from '@/service/models/user';
import { connectToDatabase } from '@/service/mongo';
import { authUser } from '@/service/utils/auth';
import { UserUpdateParams } from '@/types/user';
import { getAIChatApi, openaiBaseUrl } from '@/service/ai/openai';
import { axiosConfig, getAIChatApi, openaiBaseUrl } from '@/service/ai/openai';
/* 更新一些基本信息 */
/* update user info */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
let { avatar, openaiAccount } = req.body as UserUpdateParams;
const { avatar, openaiAccount } = req.body as UserUpdateParams;
const { userId } = await authUser({ req, authToken: true });
@@ -19,17 +19,21 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
// auth key
if (openaiAccount?.key) {
console.log('auth user openai key', openaiAccount?.key);
const baseUrl = openaiAccount?.baseUrl || openaiBaseUrl;
openaiAccount.baseUrl = baseUrl;
const chatAPI = getAIChatApi({
base: openaiAccount?.baseUrl || openaiBaseUrl,
apikey: openaiAccount?.key
});
const chatAPI = getAIChatApi(openaiAccount);
const response = await chatAPI.createChatCompletion({
model: 'gpt-3.5-turbo',
max_tokens: 1,
messages: [{ role: 'user', content: 'hi' }]
});
const response = await chatAPI.createChatCompletion(
{
model: 'gpt-3.5-turbo',
max_tokens: 1,
messages: [{ role: 'user', content: 'hi' }]
},
{
...axiosConfig(openaiAccount)
}
);
if (!response?.data?.choices?.[0]?.message?.content) {
throw new Error(JSON.stringify(response?.data));
}
@@ -42,7 +46,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
},
{
...(avatar && { avatar }),
...(openaiAccount && { openaiAccount })
openaiAccount: openaiAccount?.key ? openaiAccount : null
}
);
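
The update handler now normalises `openaiAccount.baseUrl`, probes the submitted key with a one-token `createChatCompletion` call before saving it, and clears the field when no key is provided. A sketch of the validation step in isolation, using only the helpers this diff imports (`getAIChatApi`, `axiosConfig`, `openaiBaseUrl`); the wrapper function is illustrative, not the exact handler code.

```ts
import { axiosConfig, getAIChatApi, openaiBaseUrl } from '@/service/ai/openai';
import { UserModelSchema } from '@/types/mongoSchema';

// Illustrative helper (not in the diff): throw if the submitted key/baseUrl
// cannot complete a minimal chat request.
export async function assertOpenaiAccountUsable(
  openaiAccount: NonNullable<UserModelSchema['openaiAccount']>
) {
  // default the base URL so a bare key still points at api.openai.com
  openaiAccount.baseUrl = openaiAccount.baseUrl || openaiBaseUrl;

  const chatAPI = getAIChatApi(openaiAccount);
  const response = await chatAPI.createChatCompletion(
    {
      model: 'gpt-3.5-turbo',
      max_tokens: 1, // cheapest possible probe
      messages: [{ role: 'user', content: 'hi' }]
    },
    {
      ...axiosConfig(openaiAccount)
    }
  );

  if (!response?.data?.choices?.[0]?.message?.content) {
    throw new Error(JSON.stringify(response?.data));
  }
}
```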

View File

@@ -1,3 +1,4 @@
import { UserModelSchema } from '@/types/mongoSchema';
import { Configuration, OpenAIApi } from 'openai';
export const openaiBaseUrl = 'https://api.openai.com/v1';
@@ -5,22 +6,22 @@ export const baseUrl = process.env.ONEAPI_URL || process.env.OPENAI_BASE_URL ||
export const systemAIChatKey = process.env.ONEAPI_KEY || process.env.OPENAIKEY || '';
export const getAIChatApi = (props?: { base?: string; apikey?: string }) => {
export const getAIChatApi = (props?: UserModelSchema['openaiAccount']) => {
return new OpenAIApi(
new Configuration({
basePath: props?.base || baseUrl,
apiKey: props?.apikey || systemAIChatKey
basePath: props?.baseUrl || baseUrl,
apiKey: props?.key || systemAIChatKey
})
);
};
/* openai axios config */
export const axiosConfig = (props?: { base?: string; apikey?: string }) => {
export const axiosConfig = (props?: UserModelSchema['openaiAccount']) => {
return {
baseURL: props?.base || baseUrl, // only takes effect for non-npm-client requests
baseURL: props?.baseUrl || baseUrl, // only takes effect for non-npm-client requests
httpsAgent: global.httpsAgent,
headers: {
Authorization: `Bearer ${props?.apikey || systemAIChatKey}`,
Authorization: `Bearer ${props?.key || systemAIChatKey}`,
auth: process.env.OPENAI_BASE_URL_AUTH || ''
}
};
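
Both helpers now accept the stored account object directly (`UserModelSchema['openaiAccount']`, i.e. `{ key, baseUrl }` as used elsewhere in this commit) instead of the ad-hoc `{ base, apikey }` shape, and each field falls back to the system configuration when the user has none. A usage sketch, assuming that schema shape; the key value is a placeholder:

```ts
import { axiosConfig, getAIChatApi } from '@/service/ai/openai';
import { UserModelSchema } from '@/types/mongoSchema';

// With a personal account: requests use the user's baseUrl and key.
const account: UserModelSchema['openaiAccount'] = {
  key: 'sk-placeholder', // hypothetical value for illustration
  baseUrl: 'https://api.openai.com/v1'
};
const userScopedApi = getAIChatApi(account);

// Without one: both helpers fall back to ONEAPI_URL / OPENAI_BASE_URL and
// ONEAPI_KEY / OPENAIKEY, so existing system-key call sites keep working.
const systemApi = getAIChatApi();
const systemAxios = axiosConfig();
```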

View File

@@ -19,7 +19,7 @@ export const pushTaskBill = async ({
shareId?: string;
response: ChatHistoryItemResType[];
}) => {
const total = response.reduce((sum, item) => sum + item.price, 0);
const total = response.reduce((sum, item) => sum + item.price, 0) || 1;
await Promise.allSettled([
Bill.create({

View File

@@ -5,12 +5,15 @@ import { ChatModuleEnum, ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/c
import { getAIChatApi, axiosConfig } from '@/service/ai/openai';
import type { ClassifyQuestionAgentItemType } from '@/types/app';
import { countModelPrice } from '@/service/events/pushBill';
import { UserModelSchema } from '@/types/mongoSchema';
import { getModel } from '@/service/utils/data';
export type CQProps = {
systemPrompt?: string;
history?: ChatItemType[];
userChatInput: string;
agents: ClassifyQuestionAgentItemType[];
userOpenaiAccount: UserModelSchema['openaiAccount'];
};
export type CQResponse = {
[TaskResponseKeyEnum.responseData]: ChatHistoryItemResType;
@@ -23,7 +26,7 @@ const maxTokens = 2000;
/* request openai chat */
export const dispatchClassifyQuestion = async (props: Record<string, any>): Promise<CQResponse> => {
const { agents, systemPrompt, history = [], userChatInput } = props as CQProps;
const { agents, systemPrompt, history = [], userChatInput, userOpenaiAccount } = props as CQProps;
const messages: ChatItemType[] = [
...(systemPrompt
@@ -63,7 +66,7 @@ export const dispatchClassifyQuestion = async (props: Record<string, any>): Prom
required: ['type']
}
};
const chatAPI = getAIChatApi();
const chatAPI = getAIChatApi(userOpenaiAccount);
const response = await chatAPI.createChatCompletion(
{
@@ -74,7 +77,7 @@ export const dispatchClassifyQuestion = async (props: Record<string, any>): Prom
functions: [agentFunction]
},
{
...axiosConfig()
...axiosConfig(userOpenaiAccount)
}
);
@@ -88,8 +91,8 @@ export const dispatchClassifyQuestion = async (props: Record<string, any>): Prom
[result.key]: 1,
[TaskResponseKeyEnum.responseData]: {
moduleName: ChatModuleEnum.CQ,
price: countModelPrice({ model: agentModel, tokens }),
model: agentModel,
price: userOpenaiAccount?.key ? 0 : countModelPrice({ model: agentModel, tokens }),
model: getModel(agentModel)?.name || agentModel,
tokens,
cqList: agents,
cqResult: result.value

View File

@@ -14,6 +14,7 @@ import { TaskResponseKeyEnum } from '@/constants/chat';
import { getChatModel } from '@/service/utils/data';
import { countModelPrice } from '@/service/events/pushBill';
import { ChatModelItemType } from '@/types/model';
import { UserModelSchema } from '@/types/mongoSchema';
export type ChatProps = {
res: NextApiResponse;
@@ -26,6 +27,7 @@ export type ChatProps = {
quoteQA?: QuoteItemType[];
systemPrompt?: string;
limitPrompt?: string;
userOpenaiAccount: UserModelSchema['openaiAccount'];
};
export type ChatResponse = {
[TaskResponseKeyEnum.answerText]: string;
@@ -45,7 +47,8 @@ export const dispatchChatCompletion = async (props: Record<string, any>): Promis
quoteQA = [],
userChatInput,
systemPrompt = '',
limitPrompt = ''
limitPrompt = '',
userOpenaiAccount
} = props as ChatProps;
// temperature adapt
@@ -77,7 +80,7 @@ export const dispatchChatCompletion = async (props: Record<string, any>): Promis
// FastGpt temperature range: 1~10
temperature = +(modelConstantsData.maxTemperature * (temperature / 10)).toFixed(2);
temperature = Math.max(temperature, 0.01);
const chatAPI = getAIChatApi();
const chatAPI = getAIChatApi(userOpenaiAccount);
const response = await chatAPI.createChatCompletion(
{
@@ -92,7 +95,7 @@ export const dispatchChatCompletion = async (props: Record<string, any>): Promis
{
timeout: stream ? 60000 : 480000,
responseType: stream ? 'stream' : 'json',
...axiosConfig()
...axiosConfig(userOpenaiAccount)
}
);
@@ -136,7 +139,7 @@ export const dispatchChatCompletion = async (props: Record<string, any>): Promis
[TaskResponseKeyEnum.answerText]: answerText,
[TaskResponseKeyEnum.responseData]: {
moduleName: ChatModuleEnum.AIChat,
price: countModelPrice({ model, tokens: totalTokens }),
price: userOpenaiAccount?.key ? 0 : countModelPrice({ model, tokens: totalTokens }),
model: modelConstantsData.name,
tokens: totalTokens,
question: userChatInput,
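
The two module diffs above (`dispatchClassifyQuestion` and `dispatchChatCompletion`) apply the same pattern: build the client from `userOpenaiAccount`, pass the matching axios config, and report a price of 0 when the request ran on the user's own key. A condensed sketch of that shared core; the wrapper function and its name are illustrative, and each module adds its own request body and response handling around it.

```ts
import type { ChatCompletionRequestMessage } from 'openai';
import { axiosConfig, getAIChatApi } from '@/service/ai/openai';
import { countModelPrice } from '@/service/events/pushBill';
import { UserModelSchema } from '@/types/mongoSchema';

// Illustrative shape only (not part of the commit).
async function completeWithUserAccount({
  model,
  messages,
  userOpenaiAccount
}: {
  model: string;
  messages: ChatCompletionRequestMessage[];
  userOpenaiAccount: UserModelSchema['openaiAccount'];
}) {
  // user account if configured, otherwise the system key/baseUrl
  const chatAPI = getAIChatApi(userOpenaiAccount);

  const response = await chatAPI.createChatCompletion(
    { model, messages },
    { ...axiosConfig(userOpenaiAccount) }
  );

  const tokens = response.data.usage?.total_tokens || 0;

  return {
    response,
    tokens,
    // the platform only bills requests that used the system key
    price: userOpenaiAccount?.key ? 0 : countModelPrice({ model, tokens })
  };
}
```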

View File

@@ -2,7 +2,7 @@ import type { NextApiRequest } from 'next';
import jwt from 'jsonwebtoken';
import Cookie from 'cookie';
import { App, OpenApi, User, OutLink, KB } from '../mongo';
import type { AppSchema } from '@/types/mongoSchema';
import type { AppSchema, UserModelSchema } from '@/types/mongoSchema';
import { formatPrice } from '@/utils/user';
import { ERROR_ENUM } from '../errorCode';
@@ -37,7 +37,7 @@ export const authBalanceByUid = async (uid: string) => {
return Promise.reject(ERROR_ENUM.unAuthorization);
}
if (!user.openaiKey && formatPrice(user.balance) <= 0) {
if (user.balance <= 0) {
return Promise.reject(ERROR_ENUM.insufficientQuota);
}
return user;
@@ -151,14 +151,17 @@ export const authUser = async ({
}
// balance check
if (authBalance) {
await authBalanceByUid(uid);
}
const user = await (() => {
if (authBalance) {
return authBalanceByUid(uid);
}
})();
return {
userId: uid,
appId,
authType
authType,
user
};
};
@@ -217,7 +220,13 @@ export const authShareChat = async ({ shareId }: { shareId: string }) => {
return Promise.reject('分享链接已失效');
}
const uid = String(shareChat.userId);
// authBalance
const user = await authBalanceByUid(uid);
return {
user,
userId: String(shareChat.userId),
appId: String(shareChat.appId),
authType: 'token' as AuthType
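
On the auth side, `authBalanceByUid` now rejects purely on `user.balance <= 0` (the old `openaiKey` escape hatch is gone from this check), and both `authUser({ authBalance: true })` and `authShareChat` resolve the user document so callers can read `user.openaiAccount`. A sketch of how a handler consumes the new return shape; the wrapper function is illustrative and error handling is omitted.

```ts
import type { NextApiRequest } from 'next';
import { authUser } from '@/service/utils/auth';

// Illustrative helper, not part of the commit.
async function loadRequestUser(req: NextApiRequest) {
  // `user` is only populated when authBalance is requested; without it the
  // call still returns userId / appId / authType as before.
  const { userId, user, authType } = await authUser({ req, authBalance: true });

  return {
    userId,
    authType,
    // personal OpenAI account, if the user configured one
    openaiAccount: user?.openaiAccount
  };
}
```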