perf: logger (#186)

* feat: finish response

* perf: logger

* docs

* perf: log

* docs
Archer
2023-08-17 23:19:19 +08:00
committed by GitHub
parent 324e4a0e75
commit 40168c56ea
26 changed files with 500 additions and 93 deletions

View File

@@ -182,8 +182,9 @@ export const ChatModule: FlowModuleTemplateType = {
{
key: TaskResponseKeyEnum.answerText,
label: '模型回复',
description: '直接响应,无需配置',
type: FlowOutputItemTypeEnum.hidden,
description: '如果外接了内容,会在回复结束时自动添加\n\n',
valueType: FlowValueTypeEnum.string,
type: FlowOutputItemTypeEnum.source,
targets: []
},
{
@@ -285,7 +286,16 @@ export const AnswerModule: FlowModuleTemplateType = {
'可以使用 \\n 来实现换行。也可以通过外部模块输入实现回复,外部模块输入时会覆盖当前填写的内容'
}
],
outputs: []
outputs: [
{
key: 'finish',
label: '回复结束',
description: '回复完成后触发',
valueType: FlowValueTypeEnum.boolean,
type: FlowOutputItemTypeEnum.source,
targets: []
}
]
};
export const TFSwitchModule: FlowModuleTemplateType = {
logo: '',
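
The hunk above adds a `finish` output to AnswerModule, so the flow editor can wire "reply finished" into a downstream module. A minimal TypeScript sketch of a consumer of that wiring; the types below are simplified stand-ins for illustration, not the project's real FlowOutputItemType:

    // Hypothetical helper: check whether a module's `finish` output is connected to anything.
    type FlowTarget = { moduleId: string; key: string };
    type FlowOutput = { key: string; targets: FlowTarget[] };
    function hasFinishTarget(outputs: FlowOutput[]): boolean {
      return (outputs.find((o) => o.key === 'finish')?.targets.length ?? 0) > 0;
    }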

View File

@@ -2,7 +2,7 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase } from '@/service/mongo';
import { authUser, authApp, authShareChat, AuthUserTypeEnum } from '@/service/utils/auth';
import { sseErrRes, jsonRes } from '@/service/response';
import { withNextCors } from '@/service/utils/tools';
import { addLog, withNextCors } from '@/service/utils/tools';
import { ChatRoleEnum, ChatSourceEnum, sseResponseEventEnum } from '@/constants/chat';
import {
dispatchHistory,
@@ -181,7 +181,7 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
});
}
console.log(`finish time: ${(Date.now() - startTime) / 1000}s`);
addLog.info(`completions running time: ${(Date.now() - startTime) / 1000}s`);
if (stream) {
sseResponse({
@@ -351,6 +351,7 @@ export async function dispatchModules({
res,
stream,
detail,
outputs: module.outputs,
userOpenaiAccount: user?.openaiAccount,
...params
};

View File

@@ -4,6 +4,7 @@ import NodeCard from '../modules/NodeCard';
import { FlowModuleItemType } from '@/types/flow';
import Container from '../modules/Container';
import RenderInput from '../render/RenderInput';
import RenderOutput from '../render/RenderOutput';
const NodeAnswer = ({ data }: NodeProps<FlowModuleItemType>) => {
const { moduleId, inputs, outputs, onChangeNode } = data;
@@ -11,6 +12,7 @@ const NodeAnswer = ({ data }: NodeProps<FlowModuleItemType>) => {
<NodeCard minW={'400px'} {...data}>
<Container borderTop={'2px solid'} borderTopColor={'myGray.200'}>
<RenderInput moduleId={moduleId} onChangeNode={onChangeNode} flowInputList={inputs} />
<RenderOutput onChangeNode={onChangeNode} moduleId={moduleId} flowOutputList={outputs} />
</Container>
</NodeCard>
);

View File

@@ -43,7 +43,7 @@ export async function generateQA(): Promise<any> {
// task preemption
if (!data) {
reduceQueue();
global.qaQueueLen <= 0 && console.log(`没有需要【QA】的数据, ${global.qaQueueLen}`);
global.qaQueueLen <= 0 && console.log(`【QA】任务完成`);
return;
}

View File

@@ -44,7 +44,7 @@ export async function generateVector(): Promise<any> {
// task preemption
if (!data) {
reduceQueue();
global.vectorQueueLen <= 0 && console.log(`没有需要【索引】的数据, ${global.vectorQueueLen}`);
global.vectorQueueLen <= 0 && console.log(`【索引】任务完成`);
return;
}

View File

@@ -3,6 +3,7 @@ import { BillSourceEnum } from '@/constants/user';
import { getModel } from '../utils/data';
import { ChatHistoryItemResType } from '@/types/chat';
import { formatPrice } from '@/utils/user';
import { addLog } from '../utils/tools';
export const pushTaskBill = async ({
appName,
@@ -48,7 +49,11 @@ export const pushTaskBill = async ({
: [])
]);
console.log('finish bill:', formatPrice(total));
addLog.info(`finish completions`, {
source,
userId,
price: formatPrice(total)
});
};
export const updateShareChatBill = async ({
@@ -66,8 +71,8 @@ export const updateShareChatBill = async ({
lastTime: new Date()
}
);
} catch (error) {
console.log('update shareChat error', error);
} catch (err) {
addLog.error('update shareChat error', { err });
}
};
@@ -82,7 +87,7 @@ export const pushSplitDataBill = async ({
totalTokens: number;
appName: string;
}) => {
console.log(`splitData generate success. token len: ${totalTokens}.`);
addLog.info('splitData generate success', { totalTokens });
let billId;
@@ -107,8 +112,8 @@ export const pushSplitDataBill = async ({
await User.findByIdAndUpdate(userId, {
$inc: { balance: -total }
});
} catch (error) {
console.log('创建账单失败:', error);
} catch (err) {
addLog.error('Create completions bill error', { err });
billId && Bill.findByIdAndDelete(billId);
}
};
@@ -156,8 +161,8 @@ export const pushGenerateVectorBill = async ({
await User.findByIdAndUpdate(userId, {
$inc: { balance: -total }
});
} catch (error) {
console.log('创建账单失败:', error);
} catch (err) {
addLog.error('Create generateVector bill error', { err });
billId && Bill.findByIdAndDelete(billId);
}
} catch (error) {

View File

@@ -17,6 +17,7 @@ import { ChatModelItemType } from '@/types/model';
import { UserModelSchema } from '@/types/mongoSchema';
import { textCensor } from '@/service/api/plugins';
import { ChatCompletionRequestMessageRoleEnum } from 'openai';
import { AppModuleItemType } from '@/types/app';
export type ChatProps = {
res: NextApiResponse;
@@ -31,6 +32,7 @@ export type ChatProps = {
systemPrompt?: string;
limitPrompt?: string;
userOpenaiAccount: UserModelSchema['openaiAccount'];
outputs: AppModuleItemType['outputs'];
};
export type ChatResponse = {
[TaskResponseKeyEnum.answerText]: string;
@@ -52,8 +54,12 @@ export const dispatchChatCompletion = async (props: Record<string, any>): Promis
userChatInput,
systemPrompt = '',
limitPrompt = '',
userOpenaiAccount
userOpenaiAccount,
outputs
} = props as ChatProps;
if (!userChatInput) {
return Promise.reject('Question is empty');
}
// temperature adapt
const modelConstantsData = getChatModel(model);
@@ -142,6 +148,8 @@ export const dispatchChatCompletion = async (props: Record<string, any>): Promis
messages: completeMessages
});
targetResponse({ res, detail, outputs });
return {
answerText: answer,
totalTokens,
@@ -304,6 +312,28 @@ function getMaxTokens({
};
}
function targetResponse({
res,
outputs,
detail
}: {
res: NextApiResponse;
outputs: AppModuleItemType['outputs'];
detail: boolean;
}) {
const targets =
outputs.find((output) => output.key === TaskResponseKeyEnum.answerText)?.targets || [];
if (targets.length === 0) return;
sseResponse({
res,
event: detail ? sseResponseEventEnum.answer : undefined,
data: textAdaptGptResponse({
text: '\n'
})
});
}
async function streamResponse({
res,
detail,
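
The new targetResponse helper checks whether this module's answerText output is wired to any downstream target and, if it is, writes one extra '\n' chunk over SSE so the streamed reply is separated from whatever the next module sends (matching the updated description in the module template above). A reduced, self-contained sketch of that check, assuming TaskResponseKeyEnum.answerText resolves to 'answerText' and leaving out the repo's SSE helpers, whose signatures are not shown in this hunk:

    // Sketch of the targets check inside targetResponse; simplified stand-in types.
    type Output = { key: string; targets: unknown[] };
    function shouldAppendNewline(outputs: Output[], answerKey = 'answerText'): boolean {
      const targets = outputs.find((output) => output.key === answerKey)?.targets || [];
      return targets.length > 0;
    }
    // When this returns true, the real code emits a single SSE answer event whose text is '\n'.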

View File

@@ -11,6 +11,7 @@ export type AnswerProps = {
};
export type AnswerResponse = {
[TaskResponseKeyEnum.answerText]: string;
finish: boolean;
};
export const dispatchAnswer = (props: Record<string, any>): AnswerResponse => {
@@ -27,6 +28,7 @@ export const dispatchAnswer = (props: Record<string, any>): AnswerResponse => {
}
return {
[TaskResponseKeyEnum.answerText]: text
[TaskResponseKeyEnum.answerText]: text,
finish: true
};
};
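
dispatchAnswer now reports `finish: true` alongside the answer text, so callers can treat the result as text plus a completion flag. A small consumption sketch, again assuming TaskResponseKeyEnum.answerText resolves to the string 'answerText' (not shown in this diff):

    // Illustrative consumer of the new return shape.
    type AnswerResult = { answerText: string; finish: boolean };
    function onAnswer({ answerText, finish }: AnswerResult) {
      if (finish) console.log('reply complete:', answerText);
    }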

View File

@@ -7,6 +7,8 @@ import { PRICE_SCALE } from '@/constants/common';
import { connectPg, PgClient } from './pg';
import { createHashPassword } from '@/utils/tools';
import { PgTrainingTableName } from '@/constants/plugin';
import { createLogger, format, transports } from 'winston';
import 'winston-mongodb';
/**
* connect MongoDB and init data
@@ -32,6 +34,9 @@ export async function connectToDatabase(): Promise<void> {
});
}
// logger
initLogger();
// init function
getInitConfig();
@@ -39,7 +44,6 @@ export async function connectToDatabase(): Promise<void> {
mongoose.set('strictQuery', true);
global.mongodb = await mongoose.connect(process.env.MONGODB_URI as string, {
bufferCommands: true,
dbName: process.env.MONGODB_NAME,
maxConnecting: Number(process.env.DB_MAX_LINK || 5),
maxPoolSize: Number(process.env.DB_MAX_LINK || 5),
minPoolSize: 2
@@ -57,6 +61,37 @@ export async function connectToDatabase(): Promise<void> {
startQueue();
}
function initLogger() {
global.logger = createLogger({
transports: [
new transports.MongoDB({
db: process.env.MONGODB_URI as string,
collection: 'server_logs',
options: {
useUnifiedTopology: true
},
cappedSize: 500000000,
tryReconnect: true,
metaKey: 'meta',
format: format.combine(format.timestamp(), format.json())
}),
new transports.Console({
format: format.combine(
format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
format.printf((info) => {
if (info.level === 'error') {
console.log(info.meta);
return `${info.level}: ${[info.timestamp]}: ${info.message}`;
}
return `${info.level}: ${[info.timestamp]}: ${info.message}${
info.meta ? `: ${JSON.stringify(info.meta)}` : ''
}`;
})
)
})
]
});
}
async function initRootUser() {
try {
const rootUser = await User.findOne({
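
initLogger registers two winston transports: a console transport with a readable printf format, and a MongoDB transport that writes every entry into a capped server_logs collection (cappedSize is in bytes, so roughly 500 MB) with structured data stored under the configured metaKey. A hedged sketch of reading recent entries back with mongoose; the timestamp field name is assumed to be winston-mongodb's default:

    // Illustrative only: inspect the capped log collection written by the MongoDB transport above.
    import mongoose from 'mongoose';
    async function readRecentLogs(limit = 20) {
      const conn = await mongoose.createConnection(process.env.MONGODB_URI as string).asPromise();
      const logs = await conn
        .collection('server_logs')
        .find()
        .sort({ timestamp: -1 }) // assumed default field written by winston-mongodb
        .limit(limit)
        .toArray();
      await conn.close();
      return logs;
    }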

View File

@@ -8,13 +8,9 @@ export const connectPg = async () => {
}
global.pgClient = new Pool({
host: process.env.PG_HOST,
port: process.env.PG_PORT ? +process.env.PG_PORT : 5432,
user: process.env.PG_USER,
password: process.env.PG_PASSWORD,
database: process.env.PG_DB_NAME,
connectionString: process.env.PG_URL,
max: Number(process.env.DB_MAX_LINK || 5),
idleTimeoutMillis: 30000,
keepAlive: true,
connectionTimeoutMillis: 5000
});
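
The PostgreSQL pool now reads a single PG_URL connection string instead of the separate PG_HOST/PG_PORT/PG_USER/PG_PASSWORD/PG_DB_NAME variables. A hedged example of the expected format; node-postgres accepts a standard libpq URI, and the value below is a placeholder:

    // Example env value (placeholder credentials):
    // PG_URL=postgresql://username:password@localhost:5432/postgres
    import { Pool } from 'pg';
    const pool = new Pool({ connectionString: process.env.PG_URL, max: 5 });
    pool.query('SELECT 1').then(() => console.log('pg reachable')).catch(console.error);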

View File

@@ -7,7 +7,7 @@ import {
ERROR_RESPONSE,
ERROR_ENUM
} from './errorCode';
import { clearCookie, sseResponse } from './utils/tools';
import { clearCookie, sseResponse, addLog } from './utils/tools';
export interface ResponseType<T = any> {
code: number;
@@ -52,7 +52,24 @@ export const jsonRes = <T = any>(
} else if (openaiError[error?.response?.statusText]) {
msg = openaiError[error.response.statusText];
}
console.log(error);
addLog.error(msg, {
message: error.message,
stack: error.stack,
...(error.config && {
config: {
headers: error.config.headers,
url: error.config.url,
data: error.config.data
}
}),
...(error.response && {
response: {
status: error.response.status,
statusText: error.response.statusText
}
})
});
}
res.status(code).json({
@@ -92,7 +109,24 @@ export const sseErrRes = (res: NextApiResponse, error: any) => {
} else if (openaiError[error?.response?.statusText]) {
msg = openaiError[error.response.statusText];
}
console.log('sse error => ', error);
addLog.error(`sse error: ${msg}`, {
message: error.message,
stack: error.stack,
...(error.config && {
config: {
headers: error.config.headers,
url: error.config.url,
data: error.config.data
}
}),
...(error.response && {
response: {
status: error.response.status,
statusText: error.response.statusText
}
})
});
sseResponse({
res,
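
Both jsonRes and sseErrRes now log the same structured fields: the error message and stack, plus selected request config and response status when an axios-style error carries them, instead of dumping the raw error object. A small sketch that factors that shape into one helper; the refactor is illustrative and not part of this commit:

    // Hypothetical helper mirroring the meta object built in the two hunks above.
    function buildErrorMeta(error: any) {
      return {
        message: error?.message,
        stack: error?.stack,
        ...(error?.config && {
          config: { headers: error.config.headers, url: error.config.url, data: error.config.data }
        }),
        ...(error?.response && {
          response: { status: error.response.status, statusText: error.response.statusText }
        })
      };
    }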

View File

@@ -65,6 +65,7 @@ export function withNextCors(handler: NextApiHandler): NextApiHandler {
};
}
/* start task */
export const startQueue = () => {
for (let i = 0; i < global.systemEnv.qaMaxProcess; i++) {
generateQA();
@@ -87,3 +88,13 @@ export const sseResponse = ({
event && res.write(`event: ${event}\n`);
res.write(`data: ${data}\n\n`);
};
/* add logger */
export const addLog = {
info: (msg: string, obj?: Record<string, any>) => {
global.logger.info(msg, { meta: obj });
},
error: (msg: string, obj?: Record<string, any>) => {
global.logger.error(msg, { meta: obj });
}
};
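
addLog nests its second argument under `meta`, which matches the metaKey configured on the MongoDB transport in mongo.ts, so structured fields end up queryable in server_logs and printed as JSON by the console transport. Usage sketch matching the call sites earlier in this commit:

    // The second argument lands under `meta` in both transports.
    addLog.info('splitData generate success', { totalTokens: 1024 });
    addLog.error('update shareChat error', { err: 'example error' });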

View File

@@ -2,6 +2,7 @@ import type { Mongoose } from 'mongoose';
import type { Agent } from 'http';
import type { Pool } from 'pg';
import type { Tiktoken } from '@dqbd/tiktoken';
import type { Logger } from 'winston';
import { ChatModelItemType, QAModelItemType, VectorModelItemType } from './model';
export type PagingData<T> = {
@@ -55,6 +56,9 @@ declare global {
var qaQueueLen: number;
var vectorQueueLen: number;
var OpenAiEncMap: Tiktoken;
var logger: Logger;
var sendInformQueue: (() => Promise<void>)[];
var sendInformQueueLen: number;