perf: request llm (#6191)

* perf: request error info

* perf: request llm

* perf: request llm

* openapi doc
This commit is contained in:
Archer
2026-01-06 13:21:57 +08:00
committed by GitHub
parent f7e46ec760
commit 9f2adcd523
26 changed files with 425 additions and 254 deletions

View File

@@ -149,6 +149,18 @@ export const WholeResponseContent = ({
value={formatNumber(activeModule.childTotalPoints)}
/>
)}
<Row label={t('workflow:response.Error')} value={activeModule?.error} />
<Row label={t('workflow:response.Error')} value={activeModule?.errorText} />
<Row label={t('chat:response.node_inputs')} value={activeModule?.nodeInputs} />
</>
{/* ai chat */}
<>
{activeModule?.finishReason && (
<Row
label={t('chat:completion_finish_reason')}
value={t(completionFinishReasonMap[activeModule?.finishReason])}
/>
)}
<Row label={t('common:core.chat.response.module model')} value={activeModule?.model} />
{activeModule?.tokens && (
<Row label={t('chat:llm_tokens')} value={`${activeModule?.tokens}`} />
@@ -171,12 +183,6 @@ export const WholeResponseContent = ({
label={t('common:core.chat.response.context total length')}
value={activeModule?.contextTotalLen}
/>
<Row label={t('workflow:response.Error')} value={activeModule?.error} />
<Row label={t('workflow:response.Error')} value={activeModule?.errorText} />
<Row label={t('chat:response.node_inputs')} value={activeModule?.nodeInputs} />
</>
{/* ai chat */}
<>
<Row
label={t('common:core.chat.response.module temperature')}
value={activeModule?.temperature}
@@ -185,12 +191,6 @@ export const WholeResponseContent = ({
label={t('common:core.chat.response.module maxToken')}
value={activeModule?.maxToken}
/>
{activeModule?.finishReason && (
<Row
label={t('chat:completion_finish_reason')}
value={t(completionFinishReasonMap[activeModule?.finishReason])}
/>
)}
<Row label={t('chat:reasoning_text')} value={activeModule?.reasoningText} />
<Row

View File

@@ -15,11 +15,6 @@ export type GetChatSpeechProps = OutLinkChatAuthProps & {
};
/* ---------- chat ----------- */
export type InitChatProps = {
appId?: string;
chatId?: string;
loadCustomFeedbacks?: boolean;
};
export type GetChatRecordsProps = OutLinkChatAuthProps & {
appId: string;

View File

@@ -1,9 +1,8 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { authApp } from '@fastgpt/service/support/permission/app/auth';
import { getGuideModule, getAppChatConfig } from '@fastgpt/global/core/workflow/utils';
import { getChatModelNameListByModules } from '@/service/core/app/workflow';
import type { InitChatProps, InitChatResponse } from '@/global/core/chat/api.d';
import type { InitChatResponse } from '@/global/core/chat/api.d';
import { MongoChat } from '@fastgpt/service/core/chat/chatSchema';
import { ChatErrEnum } from '@fastgpt/global/common/error/code/chat';
import { getAppLatestVersion } from '@fastgpt/service/core/app/version/controller';
@@ -14,19 +13,10 @@ import { presignVariablesFileUrls } from '@fastgpt/service/core/chat/utils';
import { MongoAppRecord } from '@fastgpt/service/core/app/record/schema';
import { AppErrEnum } from '@fastgpt/global/common/error/code/app';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { InitChatQuerySchema } from '@fastgpt/global/openapi/core/chat/controler/api';
async function handler(
req: NextApiRequest,
res: NextApiResponse
): Promise<InitChatResponse | void> {
let { appId, chatId } = req.query as InitChatProps;
if (!appId) {
return jsonRes(res, {
code: 501,
message: "You don't have an app yet"
});
}
async function handler(req: NextApiRequest, res: NextApiResponse): Promise<InitChatResponse> {
const { appId, chatId } = InitChatQuerySchema.parse(req.query);
try {
// auth app permission
@@ -99,9 +89,3 @@ async function handler(
}
export default NextAPI(handler);
export const config = {
api: {
responseLimit: '10mb'
}
};

View File

@@ -2,7 +2,6 @@ import { GET, POST, DELETE, PUT } from '@/web/common/api/request';
import type { ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d';
import type { getResDataQuery } from '@/pages/api/core/chat/getResData';
import type {
InitChatProps,
InitChatResponse,
InitOutLinkChatProps,
InitTeamChatProps
@@ -24,7 +23,10 @@ import type {
UpdateFavouriteAppParamsType
} from '@fastgpt/global/openapi/core/chat/favourite/api';
import type { ChatFavouriteAppType } from '@fastgpt/global/core/chat/favouriteApp/type';
import type { StopV2ChatParams } from '@fastgpt/global/openapi/core/chat/controler/api';
import type {
InitChatQueryType,
StopV2ChatParams
} from '@fastgpt/global/openapi/core/chat/controler/api';
import type { GetRecentlyUsedAppsResponseType } from '@fastgpt/global/openapi/core/chat/api';
export const getRecentlyUsedApps = () =>
@@ -33,7 +35,7 @@ export const getRecentlyUsedApps = () =>
/**
* 获取初始化聊天内容
*/
export const getInitChatInfo = (data: InitChatProps) =>
export const getInitChatInfo = (data: InitChatQueryType) =>
GET<InitChatResponse>(`/core/chat/init`, data);
export const getInitOutLinkChatInfo = (data: InitOutLinkChatProps) =>
GET<InitChatResponse>(`/core/chat/outLink/init`, data);