Add request logging and uncaught-error handling. (#1531)

This commit is contained in:
Archer
2024-05-20 10:31:44 +08:00
committed by GitHub
parent e35ce2caa0
commit 8e8ceb7439
9 changed files with 50 additions and 31 deletions

View File

@@ -86,4 +86,4 @@ USER nextjs
ENV serverPath=./projects/$name/server.js
ENTRYPOINT ["sh","-c","node ${serverPath}"]
ENTRYPOINT ["sh","-c","node --max-old-space-size=4096 ${serverPath}"]

View File

@@ -1,5 +1,5 @@
import { ChatCompletionRequestMessageRoleEnum } from '../../ai/constants';
import { NodeOutputKeyEnum } from '../constants';
import { NodeInputKeyEnum, NodeOutputKeyEnum } from '../constants';
import { FlowNodeTypeEnum } from '../node/constant';
import { StoreNodeItemType } from '../type';
import { StoreEdgeItemType } from '../type/edge';
@@ -8,6 +8,23 @@ import { VARIABLE_NODE_ID } from '../constants';
import { isReferenceValue } from '../utils';
import { ReferenceValueProps } from '../type/io';
/**
 * Derive the chat-history limit for a workflow by scanning every node input.
 * Falls back to 10 when no node configures a larger history amount.
 * The result is doubled — presumably one stored record per side of a
 * question/answer round; confirm against callers slicing `-limit - 2`.
 */
export const getMaxHistoryLimitFromNodes = (nodes: StoreNodeItemType[]): number => {
  const historyKeys: string[] = [NodeInputKeyEnum.history, NodeInputKeyEnum.historyMaxAmount];
  let maxAmount = 10;

  for (const node of nodes) {
    for (const input of node.inputs) {
      if (historyKeys.includes(input.key) && typeof input.value === 'number') {
        maxAmount = Math.max(maxAmount, input.value);
      }
    }
  }

  return maxAmount * 2;
};
export const initWorkflowEdgeStatus = (edges: StoreEdgeItemType[]): RuntimeEdgeItemType[] => {
return (
edges?.map((edge) => ({

View File

@@ -2,6 +2,7 @@ import { jsonRes } from '../response';
import type { NextApiResponse } from 'next';
import { withNextCors } from './cors';
import { ApiRequestProps } from '../../type/next';
import { addLog } from '../system/log';
export type NextApiHandler<T = any> = (
req: ApiRequestProps,
@@ -11,6 +12,8 @@ export type NextApiHandler<T = any> = (
export const NextEntry = ({ beforeCallback = [] }: { beforeCallback?: Promise<any>[] }) => {
return (...args: NextApiHandler[]): NextApiHandler => {
return async function api(req: ApiRequestProps, res: NextApiResponse) {
const start = Date.now();
addLog.info(`Request start ${req.url}`);
try {
await Promise.all([withNextCors(req, res), ...beforeCallback]);
@@ -20,6 +23,9 @@ export const NextEntry = ({ beforeCallback = [] }: { beforeCallback?: Promise<an
}
const contentType = res.getHeader('Content-Type');
addLog.info(`Request finish ${req.url}, time: ${Date.now() - start}ms`);
if ((!contentType || contentType === 'application/json') && !res.writableFinished) {
return jsonRes(res, {
code: 200,

View File

@@ -20,3 +20,15 @@ export const initFastGPTConfig = (config?: FastGPTConfigFileType) => {
global.whisperModel = config.whisperModel;
global.reRankModels = config.reRankModels;
};
/**
 * Install process-level safety nets at startup: log uncaught exceptions
 * and unhandled promise rejections instead of letting them crash the
 * service silently. Exiting on error is intentionally disabled (commented
 * out) so the server keeps running after a stray error.
 */
export const systemStartCb = () => {
  const onUncaughtException = (err: Error) => {
    console.error('Uncaught Exception:', err);
    // process.exit(1); // exit the process
  };
  const onUnhandledRejection = (reason: unknown, promise: Promise<unknown>) => {
    console.error('Unhandled Rejection at:', promise, 'reason:', reason);
    // process.exit(1); // exit the process
  };

  process.on('uncaughtException', onUncaughtException);
  process.on('unhandledRejection', onUnhandledRejection);
};

View File

@@ -32,7 +32,7 @@ export async function getChatItems({
return { history };
}
/* 临时适配旧的对话记录,清洗完数据后可删除4.30刪除) */
/* 临时适配旧的对话记录 */
export const adaptStringValue = (value: any): ChatItemValueItemType[] => {
if (typeof value === 'string') {
return [

View File

@@ -25,6 +25,7 @@ import { useTranslation } from 'next-i18next';
import { StoreEdgeItemType } from '@fastgpt/global/core/workflow/type/edge';
import {
getDefaultEntryNodeIds,
getMaxHistoryLimitFromNodes,
initWorkflowEdgeStatus,
storeNodes2RuntimeNodes
} from '@fastgpt/global/core/workflow/runtime/utils';
@@ -57,19 +58,8 @@ const ChatTest = (
const startChat = useCallback(
async ({ chatList, controller, generatingMessage, variables }: StartChatFnProps) => {
/* get histories */
let historyMaxLen = 6;
nodes.forEach((node) => {
node.inputs.forEach((input) => {
if (
(input.key === NodeInputKeyEnum.history ||
input.key === NodeInputKeyEnum.historyMaxAmount) &&
typeof input.value === 'number'
) {
historyMaxLen = Math.max(historyMaxLen, input.value);
}
});
});
const history = chatList.slice(-(historyMaxLen * 2) - 2, -2);
let historyMaxLen = getMaxHistoryLimitFromNodes(nodes);
const history = chatList.slice(-historyMaxLen - 2, -2);
// 流请求,获取数据
const { responseText, responseData, newVariables } = await streamFetch({

View File

@@ -10,6 +10,7 @@ import type { ChatCompletionCreateParams } from '@fastgpt/global/core/ai/type.d'
import type { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type.d';
import {
getDefaultEntryNodeIds,
getMaxHistoryLimitFromNodes,
initWorkflowEdgeStatus,
storeNodes2RuntimeNodes,
textAdaptGptResponse
@@ -168,11 +169,12 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
})();
// 1. get and concat history; 2. get app workflow
const limit = getMaxHistoryLimitFromNodes(app.modules);
const [{ history }, { nodes, edges }] = await Promise.all([
getChatItems({
appId: app._id,
chatId,
limit: 30,
limit,
field: `dataId obj value`
}),
getAppLatestVersion(app._id, app)

View File

@@ -4,7 +4,6 @@ import { useTranslation } from 'next-i18next';
import React, { useCallback, useEffect, useRef } from 'react';
import ChatBox from '@/components/ChatBox';
import type { ComponentRef, StartChatFnProps } from '@/components/ChatBox/type.d';
import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import { streamFetch } from '@/web/common/api/fetch';
import MyTooltip from '@/components/MyTooltip';
import MyIcon from '@fastgpt/web/components/common/Icon';
@@ -13,6 +12,7 @@ import { checkChatSupportSelectFileByModules } from '@/web/core/chat/utils';
import { AppTypeEnum } from '@fastgpt/global/core/app/constants';
import {
getDefaultEntryNodeIds,
getMaxHistoryLimitFromNodes,
initWorkflowEdgeStatus,
storeNodes2RuntimeNodes
} from '@fastgpt/global/core/workflow/runtime/utils';
@@ -53,19 +53,9 @@ const ChatTest = ({
if (!workflowData) return Promise.reject('workflowData is empty');
/* get histories */
let historyMaxLen = 6;
workflowData?.nodes.forEach((node) => {
node.inputs.forEach((input) => {
if (
(input.key === NodeInputKeyEnum.history ||
input.key === NodeInputKeyEnum.historyMaxAmount) &&
typeof input.value === 'number'
) {
historyMaxLen = Math.max(historyMaxLen, input.value);
}
});
});
const history = chatList.slice(-(historyMaxLen * 2) - 2, -2);
let historyMaxLen = getMaxHistoryLimitFromNodes(workflowData.nodes);
const history = chatList.slice(-historyMaxLen - 2, -2);
// 流请求,获取数据
const { responseText, responseData } = await streamFetch({

View File

@@ -11,6 +11,7 @@ import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
import { initGlobal } from './common/system';
import { startMongoWatch } from './common/system/volumnMongoWatch';
import { startTrainingQueue } from './core/dataset/training/utils';
import { systemStartCb } from '@fastgpt/service/common/system/tools';
/**
* connect MongoDB and init data
@@ -21,6 +22,7 @@ export function connectToDatabase(): Promise<void> {
initGlobal();
},
afterHook: async () => {
systemStartCb();
// init system config
getInitConfig();
//init vector database, init root user