Merge branch 'dev1.1' into beian

Archer committed on 2023-03-12 00:07:24 +08:00
10 changed files with 80 additions and 37 deletions

pnpm-lock.yaml (generated)

@@ -88,7 +88,6 @@ dependencies:
  sass: registry.npmmirror.com/sass/1.58.3
  sharp: registry.npmmirror.com/sharp/0.31.3
  tunnel: registry.npmmirror.com/tunnel/0.0.6
-   typescript: registry.npmmirror.com/typescript/4.9.5
  uuid: registry.npmmirror.com/uuid/9.0.0
  zustand: registry.npmmirror.com/zustand/4.3.5_immer@9.0.19+react@18.2.0
@@ -107,6 +106,7 @@ devDependencies:
  husky: registry.npmmirror.com/husky/8.0.3
  lint-staged: registry.npmmirror.com/lint-staged/13.1.2
  prettier: registry.npmmirror.com/prettier/2.8.4
+   typescript: registry.npmmirror.com/typescript/4.9.5

packages:
  registry.npmmirror.com/@aws-crypto/ie11-detection/3.0.0:
@@ -11890,6 +11890,7 @@ packages:
    version: 4.9.5
    engines: { node: '>=4.2.0' }
    hasBin: true
+     dev: true

  registry.npmmirror.com/unbox-primitive/1.0.2:
    resolution:


@@ -234,12 +234,10 @@
}
.markdown table th,
.markdown table td {
  border: 1px solid #cccccc;
  padding: 6px 13px;
}
.markdown table tr {
  background-color: #ffffff;
  border-top: 1px solid #cccccc;
}
.markdown table tr:nth-child(2n) {
  background-color: #f0f0f0;
@@ -379,4 +377,48 @@
text-decoration: underline;
color: var(--chakra-colors-blue-600);
}
+ table {
+   border-collapse: separate;
+   border-spacing: 0px;
+   thead tr:first-child th {
+     border-bottom-width: 1px;
+     border-left-width: 1px;
+     border-top-width: 1px;
+     border-color: #ccc;
+     background-color: rgba(236, 236, 241, 0.2);
+     overflow: hidden;
+     &:first-child {
+       border-top-left-radius: 0.375rem;
+     }
+     &:last-child {
+       border-right-width: 1px;
+       border-top-right-radius: 0.375rem;
+     }
+   }
+   td {
+     border-bottom-width: 1px;
+     border-left-width: 1px;
+     border-color: #ccc;
+     &:last-of-type {
+       border-right-width: 1px;
+     }
+   }
+   tbody tr:last-child {
+     overflow: hidden;
+     td {
+       &:first-child {
+         border-bottom-left-radius: 0.375rem;
+       }
+       &:last-child {
+         border-bottom-right-radius: 0.375rem;
+       }
+     }
+   }
+ }
}


@@ -3,7 +3,7 @@ import { createParser, ParsedEvent, ReconnectInterval } from 'eventsource-parser
import { connectToDatabase, ChatWindow } from '@/service/mongo';
import type { ModelType } from '@/types/model';
import { getOpenAIApi, authChat } from '@/service/utils/chat';
- import { openaiProxy } from '@/service/utils/tools';
+ import { httpsAgent } from '@/service/utils/tools';
import { ChatCompletionRequestMessage, ChatCompletionRequestMessageRoleEnum } from 'openai';
import { ChatItemType } from '@/types/chat';
import { openaiError } from '@/service/errorCode';
@@ -61,6 +61,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
});
// get the chat API instance
const chatAPI = getOpenAIApi(userApiKey);
const chatResponse = await chatAPI.createChatCompletion(
{
model: model.service.chatModel,
@@ -72,7 +73,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
{
timeout: 20000,
responseType: 'stream',
- httpsAgent: openaiProxy?.httpsAgent
+ httpsAgent
}
);
console.log(
@@ -123,9 +124,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
throw new Error('错误了');
}
} catch (err: any) {
- // console.log('error->', err?.response, '===');
+ console.log('error->', err?.response, '===');
let errorText = 'OpenAI 服务器访问超时';
- if (err.code === 'ECONNRESET') {
+ if (err.code === 'ECONNRESET' || err?.response?.status === 502) {
errorText = '服务器代理出错';
} else if (err?.response?.statusText && openaiError[err.response.statusText]) {
errorText = openaiError[err.response.statusText];
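
For context on the `responseType: 'stream'` request above, here is a minimal sketch (not this repository's exact handler) of how such a streamed completion is typically relayed to the client with the `eventsource-parser` import shown at the top of the file; `pipeCompletionStream` and its parameters are illustrative names:

```ts
// Minimal sketch, assuming the axios response stream and a Next.js API response.
import { createParser, ParsedEvent, ReconnectInterval } from 'eventsource-parser';
import type { NextApiResponse } from 'next';

export function pipeCompletionStream(stream: NodeJS.ReadableStream, res: NextApiResponse) {
  const parser = createParser((event: ParsedEvent | ReconnectInterval) => {
    if (event.type !== 'event') return;
    if (event.data === '[DONE]') return; // OpenAI ends the stream with a [DONE] sentinel
    try {
      // each SSE chunk carries a JSON payload with an incremental delta
      const content: string = JSON.parse(event.data).choices?.[0]?.delta?.content || '';
      if (content) res.write(content); // forward the token to the client as it arrives
    } catch (err) {
      console.log('parse stream chunk error', err);
    }
  });

  stream.on('data', (chunk: Buffer) => parser.feed(chunk.toString()));
  stream.on('end', () => res.end());
}
```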


@@ -5,7 +5,7 @@ import { connectToDatabase, Chat } from '@/service/mongo';
import type { ModelType } from '@/types/model';
import { getOpenAIApi } from '@/service/utils/chat';
import { ChatItemType } from '@/types/chat';
- import { openaiProxy } from '@/service/utils/tools';
+ import { httpsAgent } from '@/service/utils/tools';
/* send the prompt */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
@@ -66,7 +66,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
presence_penalty: 0.6,
stop: ['###']
},
- openaiProxy
+ {
+   httpsAgent
+ }
);
const responseMessage = response.data.choices[0]?.text;


@@ -5,7 +5,7 @@ import { authToken, getUserOpenaiKey } from '@/service/utils/tools';
import { TrainingStatusEnum } from '@/constants/model';
import { getOpenAIApi } from '@/service/utils/chat';
import { TrainingItemType } from '@/types/training';
- import { openaiProxy } from '@/service/utils/tools';
+ import { httpsAgent } from '@/service/utils/tools';
/* get my models */
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
@@ -47,12 +47,12 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
if (training) {
const openai = getOpenAIApi(await getUserOpenaiKey(userId));
// fetch the fine-tune record
- const tuneRecord = await openai.retrieveFineTune(training.tuneId, openaiProxy);
+ const tuneRecord = await openai.retrieveFineTune(training.tuneId, { httpsAgent });
// delete the training file
- openai.deleteFile(tuneRecord.data.training_files[0].id, openaiProxy);
+ openai.deleteFile(tuneRecord.data.training_files[0].id, { httpsAgent });
// cancel the fine-tune
- openai.cancelFineTune(training.tuneId, openaiProxy);
+ openai.cancelFineTune(training.tuneId, { httpsAgent });
}
// delete the corresponding training record


@@ -9,7 +9,7 @@ import fs from 'fs';
import type { ModelType } from '@/types/model';
import type { OpenAIApi } from 'openai';
import { ModelStatusEnum, TrainingStatusEnum } from '@/constants/model';
- import { openaiProxy } from '@/service/utils/tools';
+ import { httpsAgent } from '@/service/utils/tools';
// disable Next.js's default bodyParser handling
export const config = {


@@ -7,7 +7,7 @@ import type { ModelType } from '@/types/model';
import { TrainingItemType } from '@/types/training';
import { ModelStatusEnum, TrainingStatusEnum } from '@/constants/model';
import { OpenAiTuneStatusEnum } from '@/service/constants/training';
- import { openaiProxy } from '@/service/utils/tools';
+ import { httpsAgent } from '@/service/utils/tools';
/* update the training status */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
@@ -46,11 +46,11 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const openai = getOpenAIApi(await getUserOpenaiKey(userId));
// fetch the fine-tune status from OpenAI
- const { data } = await openai.retrieveFineTune(training.tuneId, openaiProxy);
+ const { data } = await openai.retrieveFineTune(training.tuneId, { httpsAgent });
if (data.status === OpenAiTuneStatusEnum.succeeded) {
// delete the training file
- openai.deleteFile(data.training_files[0].id, openaiProxy);
+ openai.deleteFile(data.training_files[0].id, { httpsAgent });
// update the model
await Model.findByIdAndUpdate(modelId, {
@@ -74,7 +74,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
if (data.status === OpenAiTuneStatusEnum.cancelled) {
// delete the training file
- openai.deleteFile(data.training_files[0].id, openaiProxy);
+ openai.deleteFile(data.training_files[0].id, { httpsAgent });
// update the model
await Model.findByIdAndUpdate(modelId, {


@@ -10,7 +10,7 @@ import fs from 'fs';
import type { ModelType } from '@/types/model';
import type { OpenAIApi } from 'openai';
import { ModelStatusEnum, TrainingStatusEnum } from '@/constants/model';
- import { openaiProxy } from '@/service/utils/tools';
+ import { httpsAgent } from '@/service/utils/tools';
// disable Next.js's default bodyParser handling
export const config = {
@@ -73,7 +73,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
// @ts-ignore
fs.createReadStream(file.filepath),
'fine-tune',
- openaiProxy
+ { httpsAgent }
);
uploadFileId = uploadRes.data.id; // record the uploaded file ID
@@ -84,7 +84,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
model: trainingType,
suffix: model.name
},
- openaiProxy
+ { httpsAgent }
);
trainId = trainRes.data.id; // record the fine-tune ID
@@ -114,9 +114,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
// @ts-ignore
if (openai) {
// @ts-ignore
- uploadFileId && openai.deleteFile(uploadFileId, openaiProxy);
+ uploadFileId && openai.deleteFile(uploadFileId, { httpsAgent });
// @ts-ignore
- trainId && openai.cancelFineTune(trainId, openaiProxy);
+ trainId && openai.cancelFineTune(trainId, { httpsAgent });
}
jsonRes(res, {


@@ -8,7 +8,7 @@ import { getOpenAIApi } from '@/service/utils/chat';
import { getUserOpenaiKey } from '@/service/utils/tools';
import { OpenAiTuneStatusEnum } from '@/service/constants/training';
import { sendTrainSucceed } from '@/service/utils/sendEmail';
- import { openaiProxy } from '@/service/utils/tools';
+ import { httpsAgent } from '@/service/utils/tools';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
@@ -23,10 +23,10 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const response = await Promise.all(
trainingRecords.map(async (item) => {
- const { data } = await openai.retrieveFineTune(item.tuneId, openaiProxy);
+ const { data } = await openai.retrieveFineTune(item.tuneId, { httpsAgent });
if (data.status === OpenAiTuneStatusEnum.succeeded) {
// delete the training file
- openai.deleteFile(data.training_files[0].id, openaiProxy);
+ openai.deleteFile(data.training_files[0].id, { httpsAgent });
const model = await Model.findById(item.modelId).populate({
path: 'userId',


@@ -49,15 +49,12 @@ export const getUserOpenaiKey = async (userId: string) => {
};
/* proxy */
- export const openaiProxy: any =
-   process.env.AXIOS_PROXY_PORT && process.env.AXIOS_PROXY_HOST
-     ? {
-         httpsAgent: tunnel.httpsOverHttp({
+ export const httpsAgent =
+   process.env.AXIOS_PROXY_HOST && process.env.AXIOS_PROXY_PORT
+     ? tunnel.httpsOverHttp({
        proxy: {
          host: process.env.AXIOS_PROXY_HOST,
          port: +process.env.AXIOS_PROXY_PORT
        }
-       }),
-       proxy: false
-     }
+       })
    : undefined;
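
For reference, a minimal usage sketch of the new `httpsAgent` export, assuming `AXIOS_PROXY_HOST` and `AXIOS_PROXY_PORT` are set in the environment; `getTuneStatus` is an illustrative wrapper, not a function in this repo. Callers now pass the agent directly in the axios request options instead of spreading the old `openaiProxy` object, and when the variables are unset `httpsAgent` is `undefined`, so requests go out directly.

```ts
// Illustrative only: how callers consume the httpsAgent export after this change.
import { httpsAgent } from '@/service/utils/tools';
import { getOpenAIApi } from '@/service/utils/chat';

export async function getTuneStatus(apiKey: string, tuneId: string) {
  const openai = getOpenAIApi(apiKey);
  // axios routes the request through the tunnel agent when a proxy is configured,
  // and falls back to a direct connection when httpsAgent is undefined
  const { data } = await openai.retrieveFineTune(tuneId, { httpsAgent });
  return data.status;
}
```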