Test mongo log (#4443)

* feat: mongodb-log (#4426)

* perf: mongo log

* feat: completions stop reasoner

* mongo db log

---------

Co-authored-by: Finley Ge <32237950+FinleyGe@users.noreply.github.com>
Authored by Archer on 2025-04-03 11:43:34 +08:00; committed by archer
Parent: 252ad8a611
Commit: 915569fe79
47 changed files with 305 additions and 137 deletions

env.d.ts (new vendored file, 40 lines added)
View File

@@ -0,0 +1,40 @@
declare global {
namespace NodeJS {
interface ProcessEnv {
LOG_DEPTH: string;
DEFAULT_ROOT_PSW: string;
DB_MAX_LINK: string;
TOKEN_KEY: string;
FILE_TOKEN_KEY: string;
ROOT_KEY: string;
OPENAI_BASE_URL: string;
CHAT_API_KEY: string;
AIPROXY_API_ENDPOINT: string;
AIPROXY_API_TOKEN: string;
MULTIPLE_DATA_TO_BASE64: string;
MONGODB_URI: string;
MONGODB_LOG_URI?: string;
PG_URL: string;
OCEANBASE_URL: string;
MILVUS_ADDRESS: string;
MILVUS_TOKEN: string;
SANDBOX_URL: string;
PRO_URL: string;
FE_DOMAIN: string;
FILE_DOMAIN: string;
NEXT_PUBLIC_BASE_URL: string;
LOG_LEVEL: string;
STORE_LOG_LEVEL: string;
USE_IP_LIMIT: string;
WORKFLOW_MAX_RUN_TIMES: string;
WORKFLOW_MAX_LOOP_TIMES: string;
CHECK_INTERNAL_IP: string;
CHAT_LOG_URL: string;
CHAT_LOG_INTERVAL: string;
CHAT_LOG_SOURCE_ID_PREFIX: string;
ALLOWED_ORIGINS: string;
}
}
}
export {};
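`MONGODB_LOG_URI` is declared optional; as the mongo index change below shows, the service falls back to `MONGODB_URI` when it is not set. A minimal illustration of that fallback:

// Illustrative only: mirrors MONGO_LOG_URL below. Without a dedicated log
// database, chat/system logs are written to the main MongoDB instance.
const logUri = process.env.MONGODB_LOG_URI ?? process.env.MONGODB_URI;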

View File

@@ -1,3 +1,5 @@
import { i18nT } from '../../../web/i18n/utils';
export enum ChatCompletionRequestMessageRoleEnum {
'System' = 'system',
'User' = 'user',
@@ -28,3 +30,13 @@ export enum EmbeddingTypeEnm {
query = 'query',
db = 'db'
}
export const completionFinishReasonMap = {
close: i18nT('chat:completion_finish_close'),
stop: i18nT('chat:completion_finish_stop'),
length: i18nT('chat:completion_finish_length'),
tool_calls: i18nT('chat:completion_finish_tool_calls'),
content_filter: i18nT('chat:completion_finish_content_filter'),
function_call: i18nT('chat:completion_finish_function_call'),
null: i18nT('chat:completion_finish_null')
};
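A hedged sketch of how this map is consumed on the client (the `t` translate helper is assumed from the app's i18n layer; the real lookup appears in the WholeResponse panel change further down):

// finishReason is a CompletionFinishReason; a null value falls back to the
// 'null' entry of the map before being translated.
const label = t(completionFinishReasonMap[finishReason ?? 'null']);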

View File

@@ -73,6 +73,15 @@ export type ChatCompletionMessageFunctionCall =
export type StreamChatType = Stream<openai.Chat.Completions.ChatCompletionChunk>;
export type UnStreamChatType = openai.Chat.Completions.ChatCompletion;
export type CompletionFinishReason =
| 'close'
| 'stop'
| 'length'
| 'tool_calls'
| 'content_filter'
| 'function_call'
| null;
export default openai;
export * from 'openai';

View File

@@ -22,6 +22,7 @@ import { UserSelectOptionType } from '../template/system/userSelect/type';
import { WorkflowResponseType } from '../../../../service/core/workflow/dispatch/type';
import { AiChatQuoteRoleType } from '../template/system/aiChat/type';
import { LafAccountType, OpenaiAccountType } from '../../../support/user/team/type';
import { CompletionFinishReason } from '../../ai/type';
export type ExternalProviderType = {
openaiAccount?: OpenaiAccountType;
@@ -130,6 +131,7 @@ export type DispatchNodeResponseType = {
obj: `${ChatRoleEnum}`;
value: string;
}[]; // completion context array. history will slice
finishReason?: CompletionFinishReason;
// dataset search
similarity?: number;

View File

@@ -1,17 +1,26 @@
import { addLog } from '../../common/system/log';
import mongoose, { Model } from 'mongoose';
import mongoose, { Model, Mongoose } from 'mongoose';
export default mongoose;
export * from 'mongoose';
export const MONGO_URL = process.env.MONGODB_URI as string;
export const MONGO_LOG_URL = (process.env.MONGODB_LOG_URI ?? process.env.MONGODB_URI) as string;
export const connectionMongo = (() => {
if (!global.mongodb) {
global.mongodb = mongoose;
global.mongodb = new Mongoose();
}
return global.mongodb;
})();
export const connectionLogMongo = (() => {
if (!global.mongodbLog) {
global.mongodbLog = new Mongoose();
}
return global.mongodbLog;
})();
const addCommonMiddleware = (schema: mongoose.Schema) => {
const operations = [
/^find/,
@@ -71,6 +80,19 @@ export const getMongoModel = <T>(name: string, schema: mongoose.Schema) => {
return model;
};
export const getMongoLogModel = <T>(name: string, schema: mongoose.Schema) => {
if (connectionLogMongo.models[name]) return connectionLogMongo.models[name] as Model<T>;
console.log('Load model======', name);
addCommonMiddleware(schema);
const model = connectionLogMongo.model<T>(name, schema);
// Sync index
syncMongoIndex(model);
return model;
};
const syncMongoIndex = async (model: Model<any>) => {
if (process.env.SYNC_INDEX !== '0' && process.env.NODE_ENV !== 'test') {
try {
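For illustration, this is how a collection would be registered against the log connection instead of the business connection. The schema and collection name below are hypothetical; the real change simply re-points the existing system log schema (see the schema diff that follows):

// Hypothetical example: models created via getMongoLogModel live on
// connectionLogMongo (MONGO_LOG_URL) rather than connectionMongo (MONGO_URL).
import { getMongoLogModel, Schema } from '@fastgpt/service/common/mongo';

const ExampleLogSchema = new Schema({
  level: String,
  text: String,
  time: { type: Date, default: () => new Date() }
});

export const MongoExampleLog = getMongoLogModel<{
  level: string;
  text: string;
  time: Date;
}>('example_logs', ExampleLogSchema);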

View File

@@ -1,6 +1,5 @@
import { delay } from '@fastgpt/global/common/system/utils';
import { addLog } from '../system/log';
import { connectionMongo } from './index';
import type { Mongoose } from 'mongoose';
const maxConnecting = Math.max(30, Number(process.env.DB_MAX_LINK || 20));
@@ -8,41 +7,41 @@ const maxConnecting = Math.max(30, Number(process.env.DB_MAX_LINK || 20));
/**
* connect MongoDB and init data
*/
export async function connectMongo(): Promise<Mongoose> {
export async function connectMongo(db: Mongoose, url: string): Promise<Mongoose> {
/* Connecting, connected will return */
if (connectionMongo.connection.readyState !== 0) {
return connectionMongo;
if (db.connection.readyState !== 0) {
return db;
}
console.log('mongo start connect');
console.log('MongoDB start connect');
try {
// Remove existing listeners to prevent duplicates
connectionMongo.connection.removeAllListeners('error');
connectionMongo.connection.removeAllListeners('disconnected');
connectionMongo.set('strictQuery', 'throw');
db.connection.removeAllListeners('error');
db.connection.removeAllListeners('disconnected');
db.set('strictQuery', 'throw');
connectionMongo.connection.on('error', async (error) => {
db.connection.on('error', async (error) => {
console.log('mongo error', error);
try {
if (connectionMongo.connection.readyState !== 0) {
await connectionMongo.disconnect();
if (db.connection.readyState !== 0) {
await db.disconnect();
await delay(1000);
await connectMongo();
await connectMongo(db, url);
}
} catch (error) {}
});
connectionMongo.connection.on('disconnected', async () => {
db.connection.on('disconnected', async () => {
console.log('mongo disconnected');
try {
if (connectionMongo.connection.readyState !== 0) {
await connectionMongo.disconnect();
if (db.connection.readyState !== 0) {
await db.disconnect();
await delay(1000);
await connectMongo();
await connectMongo(db, url);
}
} catch (error) {}
});
await connectionMongo.connect(process.env.MONGODB_URI as string, {
const options = {
bufferCommands: true,
maxConnecting: maxConnecting,
maxPoolSize: maxConnecting,
@@ -53,18 +52,18 @@ export async function connectMongo(): Promise<Mongoose> {
maxIdleTimeMS: 300000,
retryWrites: true,
retryReads: true
};
// readPreference: 'secondaryPreferred',
// readConcern: { level: 'local' },
// writeConcern: { w: 'majority', j: true }
});
db.connect(url, options);
console.log('mongo connected');
return connectionMongo;
return db;
} catch (error) {
addLog.error('mongo connect error', error);
await connectionMongo.disconnect();
addLog.error('Mongo connect error', error);
await db.disconnect();
await delay(1000);
return connectMongo();
return connectMongo(db, url);
}
}
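`connectMongo` is now connection-agnostic: the caller supplies the Mongoose instance and the URL. A sketch of the intended startup sequence, matching the instrumentation change later in this commit (top-level await assumed for brevity):

import { connectMongo } from '@fastgpt/service/common/mongo/init';
import {
  connectionMongo,
  connectionLogMongo,
  MONGO_URL,
  MONGO_LOG_URL
} from '@fastgpt/service/common/mongo';

// Business database: awaited, the app must not start without it.
await connectMongo(connectionMongo, MONGO_URL);
// Log database: not awaited in register(), presumably so a slow or missing
// log database does not block startup.
connectMongo(connectionLogMongo, MONGO_LOG_URL);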

View File

@@ -3,4 +3,5 @@ import type { Logger } from 'winston';
declare global {
var mongodb: Mongoose | undefined;
var mongodbLog: Mongoose | undefined;
}

View File

@@ -1,4 +1,4 @@
import { getMongoModel, Schema } from '../../../common/mongo';
import { getMongoLogModel as getMongoModel, Schema } from '../../../common/mongo';
import { SystemLogType } from './type';
import { LogLevelEnum } from './constant';

View File

@@ -2,6 +2,7 @@ import { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
import {
ChatCompletionCreateParamsNonStreaming,
ChatCompletionCreateParamsStreaming,
CompletionFinishReason,
StreamChatType
} from '@fastgpt/global/core/ai/type';
import { getLLMModel } from './model';
@@ -142,26 +143,40 @@ export const parseReasoningStreamContent = () => {
content?: string;
reasoning_content?: string;
};
finish_reason?: CompletionFinishReason;
}[];
},
parseThinkTag = false
): [string, string] => {
): {
reasoningContent: string;
content: string;
finishReason: CompletionFinishReason;
} => {
const content = part.choices?.[0]?.delta?.content || '';
const finishReason = part.choices?.[0]?.finish_reason || null;
// @ts-ignore
const reasoningContent = part.choices?.[0]?.delta?.reasoning_content || '';
if (reasoningContent || !parseThinkTag) {
isInThinkTag = false;
return [reasoningContent, content];
return { reasoningContent, content, finishReason };
}
if (!content) {
return ['', ''];
return {
reasoningContent: '',
content: '',
finishReason
};
}
// If not inside a think tag, or reasoningContent exists (already parsed by the API), return reasoningContent and content
if (isInThinkTag === false) {
return ['', content];
return {
reasoningContent: '',
content,
finishReason
};
}
// Check whether the incoming data starts with a think tag
@@ -170,17 +185,29 @@ export const parseReasoningStreamContent = () => {
startTagBuffer += content;
// Too little content to decide yet; skip parsing for now
if (startTagBuffer.length < startTag.length) {
return ['', ''];
return {
reasoningContent: '',
content: '',
finishReason
};
}
if (startTagBuffer.startsWith(startTag)) {
isInThinkTag = true;
return [startTagBuffer.slice(startTag.length), ''];
return {
reasoningContent: startTagBuffer.slice(startTag.length),
content: '',
finishReason
};
}
// If no think tag was matched, assume we are not inside one and return the buffered content as content
isInThinkTag = false;
return ['', startTagBuffer];
return {
reasoningContent: '',
content: startTagBuffer,
finishReason
};
}
// Confirmed think-tag content: start returning it as reasoning and watch for </think> as it streams
@@ -201,19 +228,35 @@ export const parseReasoningStreamContent = () => {
if (endTagBuffer.includes(endTag)) {
isInThinkTag = false;
const answer = endTagBuffer.slice(endTag.length);
return ['', answer];
return {
reasoningContent: '',
content: answer,
finishReason
};
} else if (endTagBuffer.length >= endTag.length) {
// The buffer is longer than the end tag and still has no </think>; this </think> guess failed, so we are still in the think phase.
const tmp = endTagBuffer;
endTagBuffer = '';
return [tmp, ''];
return {
reasoningContent: tmp,
content: '',
finishReason
};
}
return ['', ''];
return {
reasoningContent: '',
content: '',
finishReason
};
} else if (content.includes(endTag)) {
// The content fully contains </think>; split it and finish the think phase
isInThinkTag = false;
const [think, answer] = content.split(endTag);
return [think, answer];
return {
reasoningContent: think,
content: answer,
finishReason
};
} else {
// No buffer and no full </think> match; start checking for a partial </think> at the end of the chunk.
for (let i = 1; i < endTag.length; i++) {
@@ -222,13 +265,21 @@ export const parseReasoningStreamContent = () => {
if (content.endsWith(partialEndTag)) {
const think = content.slice(0, -partialEndTag.length);
endTagBuffer += partialEndTag;
return [think, ''];
return {
reasoningContent: think,
content: '',
finishReason
};
}
}
}
// No end tag matched at all; still in the think phase.
return [content, ''];
return {
reasoningContent: content,
content: '',
finishReason
};
};
const getStartTagBuffer = () => startTagBuffer;
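`parsePart` now returns a named object instead of the previous `[reasoning, content]` tuple and carries the finish reason through every branch. A sketch of the new consumption pattern, mirroring the stream loops changed below (`stream`, `aiChatReasoning` and `parsePart` are assumed from the surrounding function):

let answer = '';
let reasoning = '';
let finish_reason: CompletionFinishReason = null;

for await (const part of stream) {
  const { reasoningContent, content, finishReason } = parsePart(part, aiChatReasoning);
  finish_reason = finish_reason || finishReason; // keep the first non-null reason
  answer += content;
  reasoning += reasoningContent;
}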

View File

@@ -176,7 +176,8 @@ export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<
toolNodeOutputTokens,
completeMessages = [], // The actual message sent to AI(just save text)
assistantResponses = [], // FastGPT system store assistant.value response
runTimes
runTimes,
finish_reason
} = await (async () => {
const adaptMessages = chats2GPTMessages({
messages,
@@ -276,7 +277,8 @@ export const dispatchRunTools = async (props: DispatchToolModuleProps): Promise<
useVision
),
toolDetail: childToolResponse,
mergeSignId: nodeId
mergeSignId: nodeId,
finishReason: finish_reason
},
[DispatchNodeResponseKeyEnum.nodeDispatchUsages]: [
// Points consumed by the tool call itself

View File

@@ -1,6 +1,10 @@
import { createChatCompletion } from '../../../../ai/config';
import { filterGPTMessageByMaxContext, loadRequestMessages } from '../../../../chat/utils';
import { StreamChatType, ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type';
import {
StreamChatType,
ChatCompletionMessageParam,
CompletionFinishReason
} from '@fastgpt/global/core/ai/type';
import { NextApiResponse } from 'next';
import { responseWriteController } from '../../../../../common/response';
import { SseResponseEventEnum } from '@fastgpt/global/core/workflow/runtime/constants';
@@ -252,9 +256,9 @@ export const runToolWithPromptCall = async (
}
});
const { answer, reasoning } = await (async () => {
const { answer, reasoning, finish_reason } = await (async () => {
if (res && isStreamResponse) {
const { answer, reasoning } = await streamResponse({
const { answer, reasoning, finish_reason } = await streamResponse({
res,
toolNodes,
stream: aiResponse,
@@ -262,8 +266,9 @@ export const runToolWithPromptCall = async (
aiChatReasoning
});
return { answer, reasoning };
return { answer, reasoning, finish_reason };
} else {
const finish_reason = aiResponse.choices?.[0]?.finish_reason as CompletionFinishReason;
const content = aiResponse.choices?.[0]?.message?.content || '';
const reasoningContent: string = aiResponse.choices?.[0]?.message?.reasoning_content || '';
@@ -271,14 +276,16 @@ export const runToolWithPromptCall = async (
if (reasoningContent || !aiChatReasoning) {
return {
answer: content,
reasoning: reasoningContent
reasoning: reasoningContent,
finish_reason
};
}
const [think, answer] = parseReasoningContent(content);
return {
answer,
reasoning: think
reasoning: think,
finish_reason
};
}
})();
@@ -525,7 +532,8 @@ ANSWER: `;
toolNodeInputTokens,
toolNodeOutputTokens,
assistantResponses: toolNodeAssistants,
runTimes
runTimes,
finish_reason
}
);
};
@@ -550,15 +558,18 @@ async function streamResponse({
let startResponseWrite = false;
let answer = '';
let reasoning = '';
let finish_reason: CompletionFinishReason = null;
const { parsePart, getStartTagBuffer } = parseReasoningStreamContent();
for await (const part of stream) {
if (res.closed) {
stream.controller?.abort();
finish_reason = 'close';
break;
}
const [reasoningContent, content] = parsePart(part, aiChatReasoning);
const { reasoningContent, content, finishReason } = parsePart(part, aiChatReasoning);
finish_reason = finish_reason || finishReason;
answer += content;
reasoning += reasoningContent;
@@ -618,7 +629,7 @@ async function streamResponse({
}
}
return { answer, reasoning };
return { answer, reasoning, finish_reason };
}
const parseAnswer = (

View File

@@ -7,7 +7,8 @@ import {
ChatCompletionToolMessageParam,
ChatCompletionMessageParam,
ChatCompletionTool,
ChatCompletionAssistantMessageParam
ChatCompletionAssistantMessageParam,
CompletionFinishReason
} from '@fastgpt/global/core/ai/type';
import { NextApiResponse } from 'next';
import { responseWriteController } from '../../../../../common/response';
@@ -300,7 +301,7 @@ export const runToolWithToolChoice = async (
}
});
const { answer, toolCalls } = await (async () => {
const { answer, toolCalls, finish_reason } = await (async () => {
if (res && isStreamResponse) {
return streamResponse({
res,
@@ -310,6 +311,7 @@ export const runToolWithToolChoice = async (
});
} else {
const result = aiResponse as ChatCompletion;
const finish_reason = result.choices?.[0]?.finish_reason as CompletionFinishReason;
const calls = result.choices?.[0]?.message?.tool_calls || [];
const answer = result.choices?.[0]?.message?.content || '';
@@ -350,7 +352,8 @@ export const runToolWithToolChoice = async (
return {
answer,
toolCalls: toolCalls
toolCalls: toolCalls,
finish_reason
};
}
})();
@@ -549,8 +552,9 @@ export const runToolWithToolChoice = async (
toolNodeOutputTokens,
completeMessages,
assistantResponses: toolNodeAssistants,
toolWorkflowInteractiveResponse,
runTimes,
toolWorkflowInteractiveResponse
finish_reason
};
}
@@ -565,7 +569,8 @@ export const runToolWithToolChoice = async (
toolNodeInputTokens,
toolNodeOutputTokens,
assistantResponses: toolNodeAssistants,
runTimes
runTimes,
finish_reason
}
);
} else {
@@ -588,7 +593,8 @@ export const runToolWithToolChoice = async (
completeMessages,
assistantResponses: [...assistantResponses, ...toolNodeAssistant.value],
runTimes: (response?.runTimes || 0) + 1
runTimes: (response?.runTimes || 0) + 1,
finish_reason
};
}
};
@@ -612,14 +618,18 @@ async function streamResponse({
let textAnswer = '';
let callingTool: { name: string; arguments: string } | null = null;
let toolCalls: ChatCompletionMessageToolCall[] = [];
let finishReason: CompletionFinishReason = null;
for await (const part of stream) {
if (res.closed) {
stream.controller?.abort();
finishReason = 'close';
break;
}
const responseChoice = part.choices?.[0]?.delta;
const finish_reason = part.choices?.[0]?.finish_reason as CompletionFinishReason;
finishReason = finishReason || finish_reason;
if (responseChoice?.content) {
const content = responseChoice.content || '';
@@ -705,5 +715,5 @@ async function streamResponse({
}
}
return { answer: textAnswer, toolCalls };
return { answer: textAnswer, toolCalls, finish_reason: finishReason };
}

View File

@@ -1,4 +1,4 @@
import { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type';
import { ChatCompletionMessageParam, CompletionFinishReason } from '@fastgpt/global/core/ai/type';
import { NodeInputKeyEnum, NodeOutputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import type {
ModuleDispatchProps,
@@ -43,6 +43,7 @@ export type RunToolResponse = {
assistantResponses?: AIChatItemValueItemType[];
toolWorkflowInteractiveResponse?: WorkflowInteractiveResponseType;
[DispatchNodeResponseKeyEnum.runTimes]: number;
finish_reason?: CompletionFinishReason;
};
export type ToolNodeItemType = RuntimeNodeItemType & {
toolParams: RuntimeNodeItemType['inputs'];

View File

@@ -6,7 +6,11 @@ import { SseResponseEventEnum } from '@fastgpt/global/core/workflow/runtime/cons
import { textAdaptGptResponse } from '@fastgpt/global/core/workflow/runtime/utils';
import { parseReasoningContent, parseReasoningStreamContent } from '../../../ai/utils';
import { createChatCompletion } from '../../../ai/config';
import type { ChatCompletionMessageParam, StreamChatType } from '@fastgpt/global/core/ai/type.d';
import type {
ChatCompletionMessageParam,
CompletionFinishReason,
StreamChatType
} from '@fastgpt/global/core/ai/type.d';
import { formatModelChars2Points } from '../../../../support/wallet/usage/utils';
import type { LLMModelItemType } from '@fastgpt/global/core/ai/model.d';
import { postTextCensor } from '../../../../common/api/requestPlusApi';
@@ -101,7 +105,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
const modelConstantsData = getLLMModel(model);
if (!modelConstantsData) {
return Promise.reject('The chat model is undefined, you need to select a chat model.');
return Promise.reject(`Model ${model} is undefined, you need to select a chat model.`);
}
aiChatVision = modelConstantsData.vision && aiChatVision;
@@ -195,16 +199,17 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
}
});
const { answerText, reasoningText } = await (async () => {
const { answerText, reasoningText, finish_reason } = await (async () => {
if (isStreamResponse) {
if (!res) {
return {
answerText: '',
reasoningText: ''
reasoningText: '',
finish_reason: 'close' as const
};
}
// sse response
const { answer, reasoning } = await streamResponse({
const { answer, reasoning, finish_reason } = await streamResponse({
res,
stream: response,
aiChatReasoning,
@@ -215,9 +220,12 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
return {
answerText: answer,
reasoningText: reasoning
reasoningText: reasoning,
finish_reason
};
} else {
const finish_reason = response.choices?.[0]?.finish_reason as CompletionFinishReason;
const { content, reasoningContent } = (() => {
const content = response.choices?.[0]?.message?.content || '';
// @ts-ignore
@@ -260,7 +268,8 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
return {
answerText: content,
reasoningText: reasoningContent
reasoningText: reasoningContent,
finish_reason
};
}
})();
@@ -303,7 +312,8 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
maxToken: max_tokens,
reasoningText,
historyPreview: getHistoryPreview(chatCompleteMessages, 10000, aiChatVision),
contextTotalLen: completeMessages.length
contextTotalLen: completeMessages.length,
finishReason: finish_reason
},
[DispatchNodeResponseKeyEnum.nodeDispatchUsages]: [
{
@@ -528,15 +538,18 @@ async function streamResponse({
});
let answer = '';
let reasoning = '';
let finish_reason: CompletionFinishReason = null;
const { parsePart, getStartTagBuffer } = parseReasoningStreamContent();
for await (const part of stream) {
if (res.closed) {
stream.controller?.abort();
finish_reason = 'close';
break;
}
const [reasoningContent, content] = parsePart(part, parseThinkTag);
const { reasoningContent, content, finishReason } = parsePart(part, parseThinkTag);
finish_reason = finish_reason || finishReason;
answer += content;
reasoning += reasoningContent;
@@ -575,5 +588,5 @@ async function streamResponse({
}
}
return { answer, reasoning };
return { answer, reasoning, finish_reason };
}

View File

@@ -3,5 +3,5 @@
"compilerOptions": {
"baseUrl": "."
},
"include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", "**/*.d.ts", "../**/*.d.ts"]
"include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", "**/*.d.ts", "../../**/*.d.ts"]
}

View File

@@ -11,6 +11,14 @@
"chat_test_app": "Debug-{{name}}",
"citations": "{{num}} References",
"click_contextual_preview": "Click to see contextual preview",
"completion_finish_close": "Disconnection",
"completion_finish_content_filter": "Trigger safe wind control",
"completion_finish_function_call": "Function Calls",
"completion_finish_length": "Reply limit exceeded",
"completion_finish_null": "unknown",
"completion_finish_reason": "Reason for completion",
"completion_finish_stop": "Completed normally",
"completion_finish_tool_calls": "Tool calls",
"config_input_guide": "Set Up Input Guide",
"config_input_guide_lexicon": "Set Up Lexicon",
"config_input_guide_lexicon_title": "Set Up Lexicon",

View File

@@ -11,6 +11,14 @@
"chat_test_app": "调试-{{name}}",
"citations": "{{num}}条引用",
"click_contextual_preview": "点击查看上下文预览",
"completion_finish_close": "连接断开",
"completion_finish_content_filter": "触发安全风控",
"completion_finish_function_call": "函数调用",
"completion_finish_length": "超出回复限制",
"completion_finish_null": "未知",
"completion_finish_reason": "完成原因",
"completion_finish_stop": "正常完成",
"completion_finish_tool_calls": "工具调用",
"config_input_guide": "配置输入引导",
"config_input_guide_lexicon": "配置词库",
"config_input_guide_lexicon_title": "配置词库",

View File

@@ -11,6 +11,14 @@
"chat_test_app": "調試-{{name}}",
"citations": "{{num}} 筆引用",
"click_contextual_preview": "點選檢視上下文預覽",
"completion_finish_close": "連接斷開",
"completion_finish_content_filter": "觸發安全風控",
"completion_finish_function_call": "函數調用",
"completion_finish_length": "超出回复限制",
"completion_finish_null": "未知",
"completion_finish_reason": "完成原因",
"completion_finish_stop": "正常完成",
"completion_finish_tool_calls": "工具調用",
"config_input_guide": "設定輸入導引",
"config_input_guide_lexicon": "設定詞彙庫",
"config_input_guide_lexicon_title": "設定詞彙庫",

View File

@@ -24,7 +24,8 @@ MULTIPLE_DATA_TO_BASE64=true
REDIS_URL=redis://default:password@127.0.0.1:6379
# MongoDB connection parameters. When connecting to a remote database during local development, you may need to add directConnection=true for the connection to succeed.
MONGODB_URI=mongodb://username:password@0.0.0.0:27017/fastgpt?authSource=admin
# Log database
MONGODB_LOG_URI=mongodb://username:password@0.0.0.0:27017/fastgpt?authSource=admin
# Vector store priority: pg > oceanbase > milvus
# PG vector store connection parameters
PG_URL=postgresql://username:password@host:port/postgres

View File

@@ -99,7 +99,6 @@ const SettingLLMModel = ({
<AISettingModal
onClose={onCloseAIChatSetting}
onSuccess={(e) => {
console.log(e);
onChange(e);
onCloseAIChatSetting();
}}

View File

@@ -17,6 +17,7 @@ import { ChatBoxContext } from '../ChatContainer/ChatBox/Provider';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import { getFileIcon } from '@fastgpt/global/common/file/icon';
import EmptyTip from '@fastgpt/web/components/common/EmptyTip';
import { completionFinishReasonMap } from '@fastgpt/global/core/ai/constants';
type sideTabItemType = {
moduleLogo?: string;
@@ -196,6 +197,13 @@ export const WholeResponseContent = ({
label={t('common:core.chat.response.module maxToken')}
value={activeModule?.maxToken}
/>
{activeModule?.finishReason && (
<Row
label={t('chat:completion_finish_reason')}
value={t(completionFinishReasonMap[activeModule?.finishReason])}
/>
)}
<Row label={t('chat:reasoning_text')} value={activeModule?.reasoningText} />
<Row
label={t('common:core.chat.response.module historyPreview')}

View File

@@ -9,6 +9,7 @@ export async function register() {
// 基础系统初始化
const [
{ connectMongo },
{ connectionMongo, connectionLogMongo, MONGO_URL, MONGO_LOG_URL },
{ systemStartCb },
{ initGlobalVariables, getInitConfig, initSystemPluginGroups, initAppTemplateTypes },
{ initVectorStore },
@@ -19,6 +20,7 @@ export async function register() {
{ startTrainingQueue }
] = await Promise.all([
import('@fastgpt/service/common/mongo/init'),
import('@fastgpt/service/common/mongo/index'),
import('@fastgpt/service/common/system/tools'),
import('@/service/common/system'),
import('@fastgpt/service/common/vectorStore/controller'),
@@ -34,7 +36,8 @@ export async function register() {
initGlobalVariables();
// Connect to MongoDB
await connectMongo();
await connectMongo(connectionMongo, MONGO_URL);
connectMongo(connectionLogMongo, MONGO_LOG_URL);
//init system config, init vector database, init root user
await Promise.all([getInitConfig(), initVectorStore(), initRootUser()]);

View File

@@ -1,6 +1,5 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { addHours } from 'date-fns';
import { MongoImage } from '@fastgpt/service/common/file/image/schema';
@@ -56,7 +55,6 @@ async function checkInvalidImg(start: Date, end: Date, limit = 50) {
/* Move data from pg into mongo dataset.datas and build the mapping */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
await authCert({ req, authRoot: true });
const { start = -2, end = -360 * 24 } = req.body as { start: number; end: number };

View File

@@ -1,6 +1,5 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { MongoPlugin } from '@fastgpt/service/core/plugin/schema';
import { PluginTypeEnum } from '@fastgpt/global/core/plugin/constants';
@@ -8,7 +7,6 @@ import { PluginTypeEnum } from '@fastgpt/global/core/plugin/constants';
/* Move data from pg into mongo dataset.datas and build the mapping */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
await authCert({ req, authRoot: true });
await MongoPlugin.updateMany(

View File

@@ -1,13 +1,11 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { PgClient } from '@fastgpt/service/common/vectorStore/pg';
/* Move data from pg into mongo dataset.datas and build the mapping */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
await authCert({ req, authRoot: true });
// Drop indexes

View File

@@ -1,6 +1,5 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { MongoAppVersion } from '@fastgpt/service/core/app/version/schema';
import { FastGPTProUrl } from '@fastgpt/service/common/system/constants';
@@ -9,7 +8,6 @@ import { POST } from '@fastgpt/service/common/api/plusRequest';
/* Initialize published versions */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
await authCert({ req, authRoot: true });
await MongoAppVersion.updateMany(

View File

@@ -1,6 +1,5 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { addHours } from 'date-fns';
import { MongoDatasetCollection } from '@fastgpt/service/core/dataset/collection/schema';
@@ -174,7 +173,6 @@ const checkInvalidDataText = async () => {
/* Move data from pg into mongo dataset.datas and build the mapping */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
await authCert({ req, authRoot: true });
const { start = -2, end = -360 * 24 } = req.body as { start: number; end: number };

View File

@@ -1,14 +1,11 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { PgClient } from '@fastgpt/service/common/vectorStore/pg';
import { MongoApp } from '@fastgpt/service/core/app/schema';
/* Move data from pg into mongo dataset.datas and build the mapping */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
await authCert({ req, authRoot: true });
await MongoApp.updateMany(

View File

@@ -1,6 +1,5 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { MongoDataset } from '@fastgpt/service/core/dataset/schema';
import { DatasetDefaultPermissionVal } from '@fastgpt/global/support/permission/dataset/constant';
@@ -8,7 +7,6 @@ import { DatasetDefaultPermissionVal } from '@fastgpt/global/support/permission/
/* Move data from pg into mongo dataset.datas and build the mapping */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
await authCert({ req, authRoot: true });
await MongoDataset.updateMany(

View File

@@ -1,6 +1,5 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authFileToken } from '@fastgpt/service/support/permission/controller';
import { getDownloadStream, getFileById } from '@fastgpt/service/common/file/gridfs/controller';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
@@ -23,8 +22,6 @@ const previewableExtensions = [
// Abandoned, use: file/read/[filename].ts
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
const { token } = req.query as { token: string };
const { fileId, bucketName } = await authFileToken(token);

View File

@@ -1,6 +1,5 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authFileToken } from '@fastgpt/service/support/permission/controller';
import { getDownloadStream, getFileById } from '@fastgpt/service/common/file/gridfs/controller';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
@@ -21,8 +20,6 @@ const previewableExtensions = [
];
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
const { token, filename } = req.query as { token: string; filename: string };
const { fileId, bucketName } = await authFileToken(token);

View File

@@ -1,5 +1,4 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { connectToDatabase } from '@/service/mongo';
import { uploadMongoImg } from '@fastgpt/service/common/file/image/controller';
import { UploadImgProps } from '@fastgpt/global/common/file/api';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
@@ -9,7 +8,6 @@ import { NextAPI } from '@/service/middleware/entry';
Upload avatar image
*/
async function handler(req: NextApiRequest, res: NextApiResponse): Promise<string> {
await connectToDatabase();
const body = req.body as UploadImgProps;
const { teamId } = await authCert({ req, authToken: true });

View File

@@ -1,12 +1,11 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { startTrainingQueue } from '@/service/core/dataset/training/utils';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
await authCert({ req, authToken: true });
startTrainingQueue();
} catch (error) {}

View File

@@ -2,13 +2,12 @@
import { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { connectToDatabase } from '@/service/mongo';
import { UrlFetchParams, UrlFetchResponse } from '@fastgpt/global/common/file/api.d';
import { urlsFetch } from '@fastgpt/service/common/string/cheerio';
const fetchContent = async (req: NextApiRequest, res: NextApiResponse) => {
try {
await connectToDatabase();
let { urlList = [], selector } = req.body as UrlFetchParams;
if (!urlList || urlList.length === 0) {

View File

@@ -1,6 +1,6 @@
import type { NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { pushQuestionGuideUsage } from '@/service/support/wallet/usage/push';
import { createQuestionGuide } from '@fastgpt/service/core/ai/functions/createQuestionGuide';
import { ApiRequestProps } from '@fastgpt/service/type/next';
@@ -27,7 +27,6 @@ async function handler(
res: NextApiResponse<any>
) {
try {
await connectToDatabase();
const { messages } = req.body;
const { tmbId, teamId } = await authChatCert({

View File

@@ -1,6 +1,6 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import type { AdminUpdateFeedbackParams } from '@/global/core/chat/api.d';
import { MongoChatItem } from '@fastgpt/service/core/chat/chatItemSchema';
import { authChatCrud } from '@/service/support/permission/auth/chat';
@@ -8,7 +8,6 @@ import { authChatCrud } from '@/service/support/permission/auth/chat';
/* Initialize my chat box; authentication required */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
const { appId, chatId, dataId, datasetId, feedbackDataId, q, a } =
req.body as AdminUpdateFeedbackParams;

View File

@@ -1,6 +1,6 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import type { CloseCustomFeedbackParams } from '@/global/core/chat/api.d';
import { MongoChatItem } from '@fastgpt/service/core/chat/chatItemSchema';
@@ -10,7 +10,6 @@ import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
/* remove custom feedback */
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
const { appId, chatId, dataId, index } = req.body as CloseCustomFeedbackParams;
if (!dataId || !appId || !chatId) {

View File

@@ -1,6 +1,6 @@
import type { NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { GetChatSpeechProps } from '@/global/core/chat/api.d';
import { text2Speech } from '@fastgpt/service/core/ai/audio/speech';
import { pushAudioSpeechUsage } from '@/service/support/wallet/usage/push';
@@ -17,7 +17,6 @@ import { ApiRequestProps } from '@fastgpt/service/type/next';
*/
async function handler(req: ApiRequestProps<GetChatSpeechProps>, res: NextApiResponse) {
try {
await connectToDatabase();
const { ttsConfig, input } = req.body;
if (!ttsConfig.model || !ttsConfig.voice) {

View File

@@ -1,11 +1,10 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { request } from 'https';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
const { path = [], ...query } = req.query as any;
const queryStr = new URLSearchParams(query).toString();

View File

@@ -1,12 +1,11 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { request } from 'http';
import { FastGPTProUrl } from '@fastgpt/service/common/system/constants';
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
const { path = [], ...query } = req.query as any;
const requestPath = `/api/${path?.join('/')}?${new URLSearchParams(query).toString()}`;

View File

@@ -2,12 +2,11 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { MongoUser } from '@fastgpt/service/support/user/schema';
import { connectToDatabase } from '@/service/mongo';
import { MongoTeamMember } from '@fastgpt/service/support/user/team/teamMemberSchema';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
const { oldPsw, newPsw } = req.body as { oldPsw: string; newPsw: string };
if (!oldPsw || !newPsw) {

View File

@@ -1,12 +1,11 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
import { checkDatasetLimit } from '@fastgpt/service/support/permission/teamLimit';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
const { size } = req.query as {
size: string;
};

View File

@@ -1,13 +1,11 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { checkWebSyncLimit } from '@fastgpt/service/support/user/utils';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
export default async function handler(req: NextApiRequest, res: NextApiResponse<any>) {
try {
await connectToDatabase();
// Credential check
const { teamId } = await authCert({ req, authToken: true });

View File

@@ -1,12 +1,11 @@
import type { NextApiRequest, NextApiResponse } from 'next';
import { jsonRes } from '@fastgpt/service/common/response';
import { connectToDatabase } from '@/service/mongo';
import { readMongoImg } from '@fastgpt/service/common/file/image/controller';
// get the models available to the system
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
try {
await connectToDatabase();
const { id } = req.query as { id: string };
const { binary, mime } = await readMongoImg({ id });

View File

@@ -17,7 +17,7 @@ import { getInitOutLinkChatInfo } from '@/web/core/chat/api';
import { getChatTitleFromChatMessage } from '@fastgpt/global/core/chat/utils';
import { MongoOutLink } from '@fastgpt/service/support/outLink/schema';
import { addLog } from '@fastgpt/service/common/system/log';
import { connectToDatabase } from '@/service/mongo';
import NextHead from '@/components/common/NextHead';
import { useContextSelector } from 'use-context-selector';
import ChatContextProvider, { ChatContext } from '@/web/core/chat/context/chatContext';
@@ -391,7 +391,6 @@ export async function getServerSideProps(context: any) {
const app = await (async () => {
try {
await connectToDatabase();
return MongoOutLink.findOne(
{
shareId

View File

@@ -1,19 +1,9 @@
import { PRICE_SCALE } from '@fastgpt/global/support/wallet/constants';
import { MongoUser } from '@fastgpt/service/support/user/schema';
import { connectMongo } from '@fastgpt/service/common/mongo/init';
import { hashStr } from '@fastgpt/global/common/string/tools';
import { createDefaultTeam } from '@fastgpt/service/support/user/team/controller';
import { exit } from 'process';
import { mongoSessionRun } from '@fastgpt/service/common/mongo/sessionRun';
/**
* This function is equivalent to the entry to the service
* connect MongoDB and init data
*/
export function connectToDatabase() {
return connectMongo();
}
export async function initRootUser(retry = 3): Promise<any> {
try {
const rootUser = await MongoUser.findOne({

View File

@@ -1,6 +1,12 @@
import { existsSync, readFileSync } from 'fs';
import mongoose from '@fastgpt/service/common/mongo';
import { connectMongo } from '@fastgpt/service/common/mongo/init';
import {
connectionMongo,
connectionLogMongo,
MONGO_URL,
MONGO_LOG_URL
} from '@fastgpt/service/common/mongo';
import { initGlobalVariables } from '@/service/common/system';
import { afterAll, beforeAll, vi } from 'vitest';
import { setup, teardown } from 'vitest-mongodb';
@@ -63,7 +69,8 @@ beforeAll(async () => {
});
vi.stubEnv('MONGODB_URI', (globalThis as any).__MONGO_URI__);
initGlobalVariables();
await connectMongo();
await connectMongo(connectionMongo, MONGO_URL);
await connectMongo(connectionLogMongo, MONGO_LOG_URL);
// await getInitConfig();
if (existsSync('projects/app/.env.local')) {