From 05bb19799073922fc586eeff9ecb26f00e0f66d1 Mon Sep 17 00:00:00 2001 From: Archer <545436317@qq.com> Date: Sat, 21 Mar 2026 12:19:44 +0800 Subject: [PATCH] V4.14.9 features (#6599) * fix: image read and json error (Agent) (#6502) * fix: 1.image read 2.JSON parsing error * dataset cite and pause * perf: plancall second parse * add test --------- Co-authored-by: archer <545436317@qq.com> * master message * remove invalid code * feat(sre): integrate traces, logs, metrics into one sdk (#6580) * fix: image read and json error (Agent) (#6502) * fix: 1.image read 2.JSON parsing error * dataset cite and pause * perf: plancall second parse * add test --------- Co-authored-by: archer <545436317@qq.com> * master message * wip: otel sdk * feat(sre): integrate traces, logs, metrics into one sdk * fix(sre): use SpanStatusCode constants * fix(sre): clarify step memory measurement * update package * fix: ts --------- Co-authored-by: YeYuheng <57035043+YYH211@users.noreply.github.com> Co-authored-by: archer <545436317@qq.com> * doc * sandbox in agent (#6579) * doc * update template * fix: pr * fix: sdk package * update lock * update next * update dockerfile * dockerfile * dockerfile * update sdk version * update dockerefile * version --------- Co-authored-by: YeYuheng <57035043+YYH211@users.noreply.github.com> Co-authored-by: Ryo --- .claude/skills/system-test/SKILL.md | 8 + .../docs/self-host/upgrading/4-14/4149.mdx | 8 +- document/content/docs/toc.en.mdx | 1 + document/content/docs/toc.mdx | 1 + document/data/doc-last-modified.json | 8 +- .../core/chat/helperBot/topAgent/type.ts | 3 +- packages/service/common/logger/client.ts | 2 +- packages/service/common/logger/index.ts | 2 +- packages/service/common/metrics/client.ts | 12 + packages/service/common/metrics/index.ts | 1 + packages/service/common/middle/entry.ts | 157 +- packages/service/common/tracing/client.ts | 114 + packages/service/common/tracing/index.ts | 10 + .../chat/HelperBot/dispatch/topAgent/index.ts | 1 + 
.../HelperBot/dispatch/topAgent/prompt.ts | 14 + .../chat/HelperBot/dispatch/topAgent/type.ts | 18 +- .../chat/HelperBot/dispatch/topAgent/utils.ts | 13 +- .../core/workflow/dispatch/ai/agent/index.ts | 1 + .../service/core/workflow/dispatch/index.ts | 669 ++--- packages/service/core/workflow/metrics.ts | 241 ++ packages/service/env.ts | 12 +- packages/service/package.json | 7 +- .../common/Textarea/PromptEditor/Editor.tsx | 8 +- pnpm-lock.yaml | 413 ++- pnpm-workspace.yaml | 17 +- projects/app/.env.template | 14 +- projects/app/Dockerfile | 24 +- projects/app/next.config.ts | 44 +- projects/app/package.json | 2 +- .../ChatBox/components/ChatItem.tsx | 7 +- .../chat/HelperBot/components/HumanItem.tsx | 6 +- .../core/chat/HelperBot/context.tsx | 3 +- projects/app/src/instrumentation.ts | 6 + .../app/detail/Edit/ChatAgent/ChatTest.tsx | 5 +- .../app/detail/Edit/ChatAgent/EditForm.tsx | 3 +- .../Edit/ChatAgent/hooks/useSkillManager.tsx | 34 +- .../Flow/nodes/render/NodeCard.tsx | 6 +- sdk/otel/README.md | 86 + sdk/otel/package.json | 81 + sdk/otel/src/client.ts | 21 + sdk/otel/src/env-utils.ts | 32 + sdk/otel/src/env.ts | 71 + sdk/otel/src/index.ts | 7 + sdk/otel/src/logger-entry.ts | 1 + sdk/otel/src/logger/client.ts | 107 + sdk/otel/src/logger/env.ts | 62 + sdk/otel/src/logger/helpers.ts | 20 + sdk/otel/src/logger/index.ts | 22 + sdk/otel/src/logger/loggers.ts | 29 + sdk/otel/src/logger/otel.ts | 476 ++++ sdk/otel/src/logger/sinks.ts | 149 ++ sdk/otel/src/logger/types.ts | 37 + sdk/otel/src/metrics-entry.ts | 1 + sdk/otel/src/metrics/client.ts | 138 + sdk/otel/src/metrics/env.ts | 52 + sdk/otel/src/metrics/index.ts | 4 + sdk/otel/src/metrics/types.ts | 19 + sdk/otel/src/tracing-entry.ts | 3 + sdk/otel/src/tracing/client.ts | 162 ++ sdk/otel/src/tracing/env.ts | 74 + sdk/otel/src/tracing/index.ts | 4 + sdk/otel/src/tracing/types.ts | 19 + sdk/otel/src/types.ts | 9 + sdk/otel/tsconfig.json | 20 + sdk/otel/tsdown.config.ts | 10 + .../dispatch/checkNodeRunStatus.test.ts 
| 2347 ----------------- .../dispatch/checkNodeRunStatus/base.test.ts | 1376 ++++++++++ .../checkNodeRunStatus/boundary.test.ts | 267 ++ .../dispatch/checkNodeRunStatus/case.test.ts | 189 ++ .../dispatch/checkNodeRunStatus/safe.test.ts | 583 ++++ .../checkNodeRunStatus/toolcall.test.ts | 663 +++++ .../core/workflow/dispatch/index.test.ts | 0 .../core/workflow/utils.ts | 12 + .../core/workflow/utils/tarjan.test.ts | 438 +++ 74 files changed, 6599 insertions(+), 2887 deletions(-) create mode 100644 packages/service/common/metrics/client.ts create mode 100644 packages/service/common/metrics/index.ts create mode 100644 packages/service/common/tracing/client.ts create mode 100644 packages/service/common/tracing/index.ts create mode 100644 packages/service/core/workflow/metrics.ts create mode 100644 sdk/otel/README.md create mode 100644 sdk/otel/package.json create mode 100644 sdk/otel/src/client.ts create mode 100644 sdk/otel/src/env-utils.ts create mode 100644 sdk/otel/src/env.ts create mode 100644 sdk/otel/src/index.ts create mode 100644 sdk/otel/src/logger-entry.ts create mode 100644 sdk/otel/src/logger/client.ts create mode 100644 sdk/otel/src/logger/env.ts create mode 100644 sdk/otel/src/logger/helpers.ts create mode 100644 sdk/otel/src/logger/index.ts create mode 100644 sdk/otel/src/logger/loggers.ts create mode 100644 sdk/otel/src/logger/otel.ts create mode 100644 sdk/otel/src/logger/sinks.ts create mode 100644 sdk/otel/src/logger/types.ts create mode 100644 sdk/otel/src/metrics-entry.ts create mode 100644 sdk/otel/src/metrics/client.ts create mode 100644 sdk/otel/src/metrics/env.ts create mode 100644 sdk/otel/src/metrics/index.ts create mode 100644 sdk/otel/src/metrics/types.ts create mode 100644 sdk/otel/src/tracing-entry.ts create mode 100644 sdk/otel/src/tracing/client.ts create mode 100644 sdk/otel/src/tracing/env.ts create mode 100644 sdk/otel/src/tracing/index.ts create mode 100644 sdk/otel/src/tracing/types.ts create mode 100644 sdk/otel/src/types.ts 
create mode 100644 sdk/otel/tsconfig.json create mode 100644 sdk/otel/tsdown.config.ts delete mode 100644 test/cases/global/core/workflow/dispatch/checkNodeRunStatus.test.ts create mode 100644 test/cases/service/core/workflow/dispatch/checkNodeRunStatus/base.test.ts create mode 100644 test/cases/service/core/workflow/dispatch/checkNodeRunStatus/boundary.test.ts create mode 100644 test/cases/service/core/workflow/dispatch/checkNodeRunStatus/case.test.ts create mode 100644 test/cases/service/core/workflow/dispatch/checkNodeRunStatus/safe.test.ts create mode 100644 test/cases/service/core/workflow/dispatch/checkNodeRunStatus/toolcall.test.ts rename test/cases/{global => service}/core/workflow/dispatch/index.test.ts (100%) rename test/cases/{global => service}/core/workflow/utils.ts (76%) create mode 100644 test/cases/service/core/workflow/utils/tarjan.test.ts diff --git a/.claude/skills/system-test/SKILL.md b/.claude/skills/system-test/SKILL.md index 89747909b9..beb0ec3777 100644 --- a/.claude/skills/system-test/SKILL.md +++ b/.claude/skills/system-test/SKILL.md @@ -78,6 +78,14 @@ mock 对应的 API 请求进行测试。 2. 如果测试不通过,则根据错误信息检查代码逻辑或者测试用例。 3. 如需二次修改,则回到”二、测例编写“。 +## 单测包含哪些场景 + +1. 基础场景 +2. 复杂场景 +3. 边界值 +4. 安全边界情况(死循环、系统崩溃、超大数据等) +5. 异常场景 + ## 常用命令 ```shell diff --git a/document/content/docs/self-host/upgrading/4-14/4149.mdx b/document/content/docs/self-host/upgrading/4-14/4149.mdx index 78d1baf35a..abca4bc11e 100644 --- a/document/content/docs/self-host/upgrading/4-14/4149.mdx +++ b/document/content/docs/self-host/upgrading/4-14/4149.mdx @@ -26,9 +26,10 @@ AGENT_SANDBOX_SEALOS_TOKEN= ## 🚀 新增内容 1. 新增 AI 虚拟机功能,可以给 AI 挂载一个虚拟机工具进行更丰富的操作。 -2. 封装 logger sdk。 -3. 更新知识库单个数据时,同步更新 collection 更新时间。 -4. 表单输入文件时,支持打开文件进行预览。 +2. AgentV2 上下文适配暂停态。 +3. 封装 logger sdk。增加 Metrics 追踪。 +4. 更新知识库单个数据时,同步更新 collection 更新时间。 +5. 表单输入文件时,支持打开文件进行预览。 ## ⚙️ 优化 @@ -54,3 +55,4 @@ AGENT_SANDBOX_SEALOS_TOKEN= 13. 系统工具集不显示版本 14. 修复视频音频自定义文件类型流程开始无文件链接变量 15. 用户输入框消息不转义成 markdown 格式 +16. 
修复 AgentV2 部分上下文错误。 diff --git a/document/content/docs/toc.en.mdx b/document/content/docs/toc.en.mdx index f8568228ab..2a895153a3 100644 --- a/document/content/docs/toc.en.mdx +++ b/document/content/docs/toc.en.mdx @@ -77,6 +77,7 @@ description: FastGPT Toc - [/en/docs/openapi/share](/en/docs/openapi/share) - [/en/docs/self-host/config/json](/en/docs/self-host/config/json) - [/en/docs/self-host/config/model/intro](/en/docs/self-host/config/model/intro) +- [/en/docs/self-host/config/model/minimax](/en/docs/self-host/config/model/minimax) - [/en/docs/self-host/config/model/siliconCloud](/en/docs/self-host/config/model/siliconCloud) - [/en/docs/self-host/config/object-storage](/en/docs/self-host/config/object-storage) - [/en/docs/self-host/config/signoz](/en/docs/self-host/config/signoz) diff --git a/document/content/docs/toc.mdx b/document/content/docs/toc.mdx index 6141b5f3b0..adb30c6af0 100644 --- a/document/content/docs/toc.mdx +++ b/document/content/docs/toc.mdx @@ -77,6 +77,7 @@ description: FastGPT 文档目录 - [/docs/openapi/share](/docs/openapi/share) - [/docs/self-host/config/json](/docs/self-host/config/json) - [/docs/self-host/config/model/intro](/docs/self-host/config/model/intro) +- [/docs/self-host/config/model/minimax](/docs/self-host/config/model/minimax) - [/docs/self-host/config/model/siliconCloud](/docs/self-host/config/model/siliconCloud) - [/docs/self-host/config/object-storage](/docs/self-host/config/object-storage) - [/docs/self-host/config/signoz](/docs/self-host/config/signoz) diff --git a/document/data/doc-last-modified.json b/document/data/doc-last-modified.json index 16e0f61cda..de514dd7b6 100644 --- a/document/data/doc-last-modified.json +++ b/document/data/doc-last-modified.json @@ -149,6 +149,8 @@ "document/content/docs/self-host/config/json.mdx": "2026-03-03T17:39:47+08:00", "document/content/docs/self-host/config/model/intro.en.mdx": "2026-03-19T14:09:03+08:00", "document/content/docs/self-host/config/model/intro.mdx": 
"2026-03-19T14:09:03+08:00", + "document/content/docs/self-host/config/model/minimax.en.mdx": "2026-03-19T09:32:57-05:00", + "document/content/docs/self-host/config/model/minimax.mdx": "2026-03-19T09:32:57-05:00", "document/content/docs/self-host/config/model/siliconCloud.en.mdx": "2026-03-19T14:09:03+08:00", "document/content/docs/self-host/config/model/siliconCloud.mdx": "2026-03-19T14:09:03+08:00", "document/content/docs/self-host/config/object-storage.en.mdx": "2026-03-03T17:39:47+08:00", @@ -236,7 +238,7 @@ "document/content/docs/self-host/upgrading/4-14/4148.mdx": "2026-03-09T17:39:53+08:00", "document/content/docs/self-host/upgrading/4-14/41481.en.mdx": "2026-03-09T12:02:02+08:00", "document/content/docs/self-host/upgrading/4-14/41481.mdx": "2026-03-09T17:39:53+08:00", - "document/content/docs/self-host/upgrading/4-14/4149.mdx": "2026-03-19T14:09:03+08:00", + "document/content/docs/self-host/upgrading/4-14/4149.mdx": "2026-03-20T22:01:38+08:00", "document/content/docs/self-host/upgrading/outdated/40.en.mdx": "2026-03-03T17:39:47+08:00", "document/content/docs/self-host/upgrading/outdated/40.mdx": "2026-03-03T17:39:47+08:00", "document/content/docs/self-host/upgrading/outdated/41.en.mdx": "2026-03-03T17:39:47+08:00", @@ -377,8 +379,8 @@ "document/content/docs/self-host/upgrading/outdated/499.mdx": "2026-03-03T17:39:47+08:00", "document/content/docs/self-host/upgrading/upgrade-intruction.en.mdx": "2026-03-03T17:39:47+08:00", "document/content/docs/self-host/upgrading/upgrade-intruction.mdx": "2026-03-03T17:39:47+08:00", - "document/content/docs/toc.en.mdx": "2026-03-19T14:09:03+08:00", - "document/content/docs/toc.mdx": "2026-03-19T14:09:03+08:00", + "document/content/docs/toc.en.mdx": "2026-03-20T21:57:22+08:00", + "document/content/docs/toc.mdx": "2026-03-20T21:57:22+08:00", "document/content/docs/use-cases/app-cases/dalle3.en.mdx": "2026-02-26T22:14:30+08:00", "document/content/docs/use-cases/app-cases/dalle3.mdx": "2025-07-23T21:35:03+08:00", 
"document/content/docs/use-cases/app-cases/english_essay_correction_bot.en.mdx": "2026-02-26T22:14:30+08:00", diff --git a/packages/global/core/chat/helperBot/topAgent/type.ts b/packages/global/core/chat/helperBot/topAgent/type.ts index 47d4063e87..e2b92780e3 100644 --- a/packages/global/core/chat/helperBot/topAgent/type.ts +++ b/packages/global/core/chat/helperBot/topAgent/type.ts @@ -7,6 +7,7 @@ export const topAgentParamsSchema = z.object({ systemPrompt: z.string().nullish(), selectedTools: z.array(z.string()).nullish(), selectedDatasets: z.array(z.string()).nullish(), - fileUpload: z.boolean().nullish() + fileUpload: z.boolean().nullish(), + enableSandbox: z.boolean().nullish() }); export type TopAgentParamsType = z.infer; diff --git a/packages/service/common/logger/client.ts b/packages/service/common/logger/client.ts index 81e52d20d7..0d1ce804f5 100644 --- a/packages/service/common/logger/client.ts +++ b/packages/service/common/logger/client.ts @@ -1,4 +1,4 @@ -import { configureLoggerFromEnv, disposeLogger, getLogger } from '@fastgpt-sdk/logger'; +import { configureLoggerFromEnv, disposeLogger, getLogger } from '@fastgpt-sdk/otel/logger'; import { env } from '../../env'; export async function configureLogger() { diff --git a/packages/service/common/logger/index.ts b/packages/service/common/logger/index.ts index 0e645948ba..180c2f6eb2 100644 --- a/packages/service/common/logger/index.ts +++ b/packages/service/common/logger/index.ts @@ -1,4 +1,4 @@ export { configureLogger, disposeLogger, getLogger } from './client'; -export { withContext, withCategoryPrefix } from '@fastgpt-sdk/logger'; +export { withContext, withCategoryPrefix } from '@fastgpt-sdk/otel/logger'; export { LogCategories } from './categories'; export type { LogCategory } from './categories'; diff --git a/packages/service/common/metrics/client.ts b/packages/service/common/metrics/client.ts new file mode 100644 index 0000000000..bb867d4b8b --- /dev/null +++ 
b/packages/service/common/metrics/client.ts @@ -0,0 +1,12 @@ +import { configureMetricsFromEnv, disposeMetrics, getMeter } from '@fastgpt-sdk/otel/metrics'; +import { env } from '../../env'; + +export async function configureMetrics() { + await configureMetricsFromEnv({ + env, + defaultServiceName: 'fastgpt-client', + defaultMeterName: 'fastgpt-client' + }); +} + +export { disposeMetrics, getMeter }; diff --git a/packages/service/common/metrics/index.ts b/packages/service/common/metrics/index.ts new file mode 100644 index 0000000000..6fce757855 --- /dev/null +++ b/packages/service/common/metrics/index.ts @@ -0,0 +1 @@ +export { configureMetrics, disposeMetrics, getMeter } from './client'; diff --git a/packages/service/common/middle/entry.ts b/packages/service/common/middle/entry.ts index 3527fb8ded..aab227be7c 100644 --- a/packages/service/common/middle/entry.ts +++ b/packages/service/common/middle/entry.ts @@ -1,8 +1,10 @@ import { jsonRes } from '../response'; import type { NextApiRequest, NextApiResponse } from 'next'; +import { SpanStatusCode } from '@opentelemetry/api'; import { withNextCors } from './cors'; import { type ApiRequestProps } from '../../type/next'; import { getLogger, LogCategories, withContext } from '../logger'; +import { setSpanError, withActiveSpan } from '../tracing'; import { ZodError } from 'zod'; import { randomUUID } from 'crypto'; @@ -24,7 +26,6 @@ export const NextEntry = ({ const requestLogger = getLogger(LogCategories.HTTP.REQUEST); const responseLogger = getLogger(LogCategories.HTTP.RESPONSE); - const errorLogger = getLogger(LogCategories.HTTP.ERROR); const url = req.url || ''; const method = req.method?.toUpperCase() || ''; @@ -32,75 +33,105 @@ export const NextEntry = ({ const userAgent = req.headers['user-agent']; const contentLength = req.headers['content-length']; - return withContext({ requestId }, async () => { - requestLogger.info(`[${method}] ${url}`, { - verbose: false, - requestId, - method, - url, - ip, - userAgent, - 
contentLength - }); + return withContext({ requestId }, async () => + withActiveSpan( + { + name: `http.request ${method || 'UNKNOWN'} ${url || '/'}`, + tracerName: 'fastgpt.http', + attributes: { + 'fastgpt.request.id': requestId, + 'http.request.method': method, + 'url.full': url, + 'client.address': Array.isArray(ip) ? ip.join(',') : ip, + 'user_agent.original': userAgent, + 'http.request.body.size': contentLength + } + }, + async (span) => { + requestLogger.info(`[${method}] ${url}`, { + verbose: false, + requestId, + method, + url, + ip, + userAgent, + contentLength + }); - let responseLogged = false; - const logResponse = (event: 'request-finish' | 'request-close') => { - if (responseLogged) return; - responseLogged = true; - const durationMs = Date.now() - start; - const httpStatusCode = res.statusCode; + let responseLogged = false; + const logResponse = (event: 'request-finish' | 'request-close') => { + if (responseLogged) return; + responseLogged = true; + const durationMs = Date.now() - start; + const httpStatusCode = res.statusCode; - responseLogger.info(`[${method}] ${url} - ${httpStatusCode} in ${durationMs}ms`, { - verbose: false, - requestId, - method, - httpStatusCode, - event - }); - }; + responseLogger.info(`[${method}] ${url} - ${httpStatusCode} in ${durationMs}ms`, { + verbose: false, + requestId, + method, + httpStatusCode, + event + }); + }; - res.once('finish', () => logResponse('request-finish')); - res.once('close', () => logResponse('request-close')); + res.once('finish', () => logResponse('request-finish')); + res.once('close', () => logResponse('request-close')); - try { - await Promise.all([ - withNextCors(req, res), - ...beforeCallback.map((item) => item(req, res)) - ]); + try { + await Promise.all([ + withNextCors(req, res), + ...beforeCallback.map((item) => item(req, res)) + ]); - let response = null; - for await (const handler of args) { - response = await handler(req, res); - if (res.writableFinished) { - break; + let response = 
null; + for await (const handler of args) { + response = await handler(req, res); + if (res.writableFinished) { + break; + } + } + + const contentType = res.getHeader('Content-Type'); + if ((!contentType || contentType === 'application/json') && !res.writableFinished) { + const jsonResponse = await jsonRes(res, { + code: 200, + data: response + }); + + span.setAttribute('http.response.status_code', res.statusCode); + return jsonResponse; + } + + span.setAttribute('http.response.status_code', res.statusCode); + } catch (error) { + // Handle Zod validation errors + if (error instanceof ZodError) { + span.setAttribute('http.response.status_code', 400); + span.setStatus({ + code: SpanStatusCode.ERROR, + message: 'Data validation error' + }); + + return jsonRes(res, { + code: 400, + message: 'Data validation error', + error, + url: req.url + }); + } + + span.setAttribute('http.response.status_code', 500); + setSpanError(span, error); + + return jsonRes(res, { + code: 500, + error, + url: req.url + }); } } - - const contentType = res.getHeader('Content-Type'); - if ((!contentType || contentType === 'application/json') && !res.writableFinished) { - return jsonRes(res, { - code: 200, - data: response - }); - } - } catch (error) { - // Handle Zod validation errors - if (error instanceof ZodError) { - return jsonRes(res, { - code: 400, - message: 'Data validation error', - error, - url: req.url - }); - } - - return jsonRes(res, { - code: 500, - error, - url: req.url - }); - } - }); + ) + ); }; }; }; diff --git a/packages/service/common/tracing/client.ts b/packages/service/common/tracing/client.ts new file mode 100644 index 0000000000..1ca91ec5c9 --- /dev/null +++ b/packages/service/common/tracing/client.ts @@ -0,0 +1,114 @@ +import { getErrText } from '@fastgpt/global/common/error/utils'; +import { SpanStatusCode } from '@opentelemetry/api'; +import { + configureTracingFromEnv, + disposeTracing, + getCurrentSpanContext, + getTracer +} from '@fastgpt-sdk/otel/tracing'; 
+import { withContext } from '../logger'; +import { env } from '../../env'; + +type SpanAttributeValue = string | number | boolean; +type SpanStatusLike = { + code?: number; + message?: string; +}; +type TracerLike = ReturnType; +type SpanLike = ReturnType; + +export type TraceLogContext = { + traceId: string; + spanId: string; +}; + +export type ActiveSpanOptions = { + name: string; + tracer?: TracerLike; + tracerName?: string; + attributes?: Record; +}; + +function normalizeAttributes(attributes?: Record) { + if (!attributes) return; + + const normalized: Record = {}; + + Object.entries(attributes).forEach(([key, value]) => { + if (value === undefined || value === null) return; + + if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') { + normalized[key] = value satisfies SpanAttributeValue; + return; + } + }); + + return Object.keys(normalized).length > 0 ? normalized : undefined; +} + +export async function configureTracing() { + await configureTracingFromEnv({ + env, + defaultServiceName: 'fastgpt-client', + defaultTracerName: 'fastgpt-client', + defaultSampleRatio: env.TRACING_OTEL_SAMPLE_RATIO + }); +} + +export function getTraceLogContext(): TraceLogContext | undefined { + const spanContext = getCurrentSpanContext(); + if (!spanContext) return; + + return { + traceId: spanContext.traceId, + spanId: spanContext.spanId + }; +} + +export function setSpanError( + span: SpanLike, + error: unknown, + extraStatus?: Partial +) { + span.recordException(error instanceof Error ? error : new Error(getErrText(error))); + span.setStatus({ + code: SpanStatusCode.ERROR, + message: extraStatus?.message ?? getErrText(error) + }); +} + +export async function withActiveSpan( + options: ActiveSpanOptions, + callback: (span: SpanLike) => Promise | T +): Promise { + const tracer = options.tracer ?? 
getTracer(options.tracerName); + + return tracer.startActiveSpan( + options.name, + { + attributes: normalizeAttributes(options.attributes) + }, + async (span: SpanLike) => { + const spanContext = span.spanContext(); + + return withContext( + { + traceId: spanContext.traceId, + spanId: spanContext.spanId + }, + async () => { + try { + return await callback(span); + } catch (error) { + setSpanError(span, error); + throw error; + } finally { + span.end(); + } + } + ); + } + ); +} + +export { disposeTracing, getCurrentSpanContext, getTracer }; diff --git a/packages/service/common/tracing/index.ts b/packages/service/common/tracing/index.ts new file mode 100644 index 0000000000..960e2f28fc --- /dev/null +++ b/packages/service/common/tracing/index.ts @@ -0,0 +1,10 @@ +export { + configureTracing, + disposeTracing, + getCurrentSpanContext, + getTraceLogContext, + getTracer, + setSpanError, + withActiveSpan +} from './client'; +export type { ActiveSpanOptions, TraceLogContext } from './client'; diff --git a/packages/service/core/chat/HelperBot/dispatch/topAgent/index.ts b/packages/service/core/chat/HelperBot/dispatch/topAgent/index.ts index b382042f75..9130f3a0dd 100644 --- a/packages/service/core/chat/HelperBot/dispatch/topAgent/index.ts +++ b/packages/service/core/chat/HelperBot/dispatch/topAgent/index.ts @@ -160,6 +160,7 @@ export const dispatchTopAgent = async ( tools, // 从 execution_plan 提取 datasets: filterDatasets, fileUploadEnabled: responseJson.resources?.system_features?.file_upload?.enabled || false, + enableSandboxEnabled: responseJson.resources?.system_features?.sandbox?.enabled || false, executionPlan: responseJson.execution_plan // 保存原始 execution_plan }); diff --git a/packages/service/core/chat/HelperBot/dispatch/topAgent/prompt.ts b/packages/service/core/chat/HelperBot/dispatch/topAgent/prompt.ts index 9ce6bb3f3b..8052f0ab0b 100644 --- a/packages/service/core/chat/HelperBot/dispatch/topAgent/prompt.ts +++ 
b/packages/service/core/chat/HelperBot/dispatch/topAgent/prompt.ts @@ -34,6 +34,12 @@ export const getPrompt = ({ ); } + if (metadata.enableSandbox !== undefined && metadata.enableSandbox !== null) { + sections.push( + `**虚拟机**: ${metadata.enableSandbox ? '搭建者已启用虚拟机功能' : '搭建者已禁用虚拟机功能'}` + ); + } + if (sections.length === 0) return ''; return ` @@ -337,6 +343,8 @@ ${resourceList} - 系统功能判断: * 是否需要用户的私有文件?→ 启用 file_upload * 数据能否通过工具获取?→ 不需要 file_upload + * 是否需要执行代码或数据处理(如运行 Python 脚本、复杂计算、数据转换)?→ 启用 sandbox + * 任务仅需 LLM 推理和工具调用,无需执行任意代码?→ 不需要 sandbox 🔧 第四层:资源整合 - 收集所有需要的工具、知识库和系统功能 @@ -377,6 +385,10 @@ ${resourceList} "file_upload": { "enabled": true/false, "purpose": "说明原因(enabled=true时必填)" + }, + "sandbox": { + "enabled": true/false, + "purpose": "说明为何需要虚拟机执行能力(enabled=true时必填,适用于代码执行、数据处理等场景)" } } } @@ -395,6 +407,8 @@ ${resourceList} - resources: 资源配置对象,仅包含系统功能配置 * system_features.file_upload.enabled: 是否需要文件上传(必填) * system_features.file_upload.purpose: 为什么需要(enabled=true时必填) + * system_features.sandbox.enabled: 是否需要虚拟机执行能力(可选,适用于代码执行、数据处理场景) + * system_features.sandbox.purpose: 为什么需要虚拟机(enabled=true时必填) **执行计划设计**: diff --git a/packages/service/core/chat/HelperBot/dispatch/topAgent/type.ts b/packages/service/core/chat/HelperBot/dispatch/topAgent/type.ts index 1144fb8b54..99b97c59e4 100644 --- a/packages/service/core/chat/HelperBot/dispatch/topAgent/type.ts +++ b/packages/service/core/chat/HelperBot/dispatch/topAgent/type.ts @@ -27,6 +27,7 @@ export const TopAgentFormDataSchema = z.object({ tools: z.array(z.string()).optional().default([]), datasets: z.array(SelectedDatasetSchema).optional().default([]), fileUploadEnabled: z.boolean().optional().default(false), + enableSandboxEnabled: z.boolean().optional().default(false), executionPlan: z.any().optional() }); export type TopAgentFormDataType = z.infer; @@ -47,10 +48,19 @@ export const TopAgentGenerationAnswerSchema = z.object({ execution_plan: ExecutionPlanSchema.optional(), resources: z.object({ system_features: 
z.object({ - file_upload: z.object({ - enabled: z.boolean(), - purpose: z.string().optional() - }) + file_upload: z + .object({ + enabled: z.boolean(), + purpose: z.string().optional() + }) + .optional() + .default({ enabled: false }), + sandbox: z + .object({ + enabled: z.boolean() + }) + .optional() + .default({ enabled: false }) }) }) }); diff --git a/packages/service/core/chat/HelperBot/dispatch/topAgent/utils.ts b/packages/service/core/chat/HelperBot/dispatch/topAgent/utils.ts index d32194a7c0..0c00efda8c 100644 --- a/packages/service/core/chat/HelperBot/dispatch/topAgent/utils.ts +++ b/packages/service/core/chat/HelperBot/dispatch/topAgent/utils.ts @@ -65,7 +65,8 @@ ${tool} ${dataset} ### 系统功能 -- **file_upload**: 文件上传功能 (enabled, purpose, file_types) +- **file_upload**: 文件上传功能,允许用户在对话中上传文件,让 Agent 读取私有文件内容 +- **sandbox**: 虚拟机执行环境,为 Agent 提供代码运行能力(Python、Shell 等),适用于数据处理、科学计算、代码执行等场景 `; }; @@ -109,10 +110,12 @@ ${dataset} }) ]); - const allTools = [...systemTools, ...myTools]; - const fileReadInfo = systemSubInfo[SubAppIds.fileRead]; - const fileReadTool = `- **${SubAppIds.fileRead}** [工具]: ${parseI18nString(fileReadInfo.name, lang)} - ${fileReadInfo.toolDescription}`; - allTools.push(fileReadTool); + const builtinTools = [SubAppIds.fileRead, SubAppIds.sandboxTool].map((id) => { + const info = systemSubInfo[id]; + return `- **${id}** [工具]: ${parseI18nString(info.name, lang)} - ${info.toolDescription}`; + }); + + const allTools = [...systemTools, ...myTools, ...builtinTools]; return { resourceList: getPrompt({ diff --git a/packages/service/core/workflow/dispatch/ai/agent/index.ts b/packages/service/core/workflow/dispatch/ai/agent/index.ts index 2bd6e3d741..c510f3105e 100644 --- a/packages/service/core/workflow/dispatch/ai/agent/index.ts +++ b/packages/service/core/workflow/dispatch/ai/agent/index.ts @@ -89,6 +89,7 @@ export const dispatchRunAgent = async (props: DispatchAgentModuleProps): Promise userChatInput, // 本次任务的输入 history = 6, fileUrlList: fileLinks, + 
aiChatVision = true, agent_selectedTools: selectedTools = [], // Dataset search configuration agent_datasetParams: datasetParams, diff --git a/packages/service/core/workflow/dispatch/index.ts b/packages/service/core/workflow/dispatch/index.ts index 49f7ab7a6a..dc0dcd7c46 100644 --- a/packages/service/core/workflow/dispatch/index.ts +++ b/packages/service/core/workflow/dispatch/index.ts @@ -1,5 +1,6 @@ import { getNanoid } from '@fastgpt/global/common/string/tools'; import { getSystemTime } from '@fastgpt/global/common/time/timezone'; +import { SpanStatusCode } from '@opentelemetry/api'; import type { AIChatItemValueItemType, ChatHistoryItemResType, @@ -60,6 +61,8 @@ import { TeamErrEnum } from '@fastgpt/global/common/error/code/team'; import { i18nT } from '../../../../web/i18n/utils'; import { validateFileUrlDomain } from '../../../common/security/fileUrlValidator'; import { classifyEdgesByDFS, findSCCs, isNodeInCycle, getEdgeType } from '../utils/tarjan'; +import { observeWorkflowStep } from '../metrics'; +import { withActiveSpan } from '../../../common/tracing'; const logger = getLogger(LogCategories.MODULE.WORKFLOW.DISPATCH); import { delAgentRuntimeStopSign, shouldWorkflowStop } from './workflowStatus'; @@ -736,233 +739,278 @@ export class WorkflowQueue { runStatus: 'run'; result: NodeResponseCompleteType; }> { - /* Inject data into module input */ - const getNodeRunParams = (node: RuntimeNodeItemType) => { - if (node.flowNodeType === FlowNodeTypeEnum.pluginInput) { - // Format plugin input to object - return node.inputs.reduce>((acc, item) => { - acc[item.key] = valueTypeFormat(item.value, item.valueType); - return acc; - }, {}); - } - - // Dynamic input need to store a key. - const dynamicInput = node.inputs.find( - (item) => item.renderTypeList[0] === FlowNodeInputTypeEnum.addInputParam - ); - const params: Record = dynamicInput - ? 
{ - [dynamicInput.key]: {} - } - : {}; - - node.inputs.forEach((input) => { - // Special input, not format - if (input.key === dynamicInput?.key) return; - - // Skip some special key - if ( - [NodeInputKeyEnum.childrenNodeIdList, NodeInputKeyEnum.httpJsonBody].includes( - input.key as NodeInputKeyEnum - ) - ) { - params[input.key] = input.value; - return; - } - - // replace {{$xx.xx$}} and {{xx}} variables - let value = replaceEditorVariable({ - text: input.value, - nodes: this.data.runtimeNodes, - variables: this.data.variables - }); - - // replace reference variables - value = getReferenceVariableValue({ - value, - nodes: this.data.runtimeNodes, - variables: this.data.variables - }); - - // Dynamic input is stored in the dynamic key - if (input.canEdit && dynamicInput && params[dynamicInput.key]) { - params[dynamicInput.key][input.key] = valueTypeFormat(value, input.valueType); - } - params[input.key] = valueTypeFormat(value, input.valueType); - }); - - return params; - }; - - // push run status messages - if (node.showStatus && !this.data.isToolCall) { - this.data.workflowStreamResponse?.({ - event: SseResponseEventEnum.flowNodeStatus, - data: { - status: 'running', - name: node.name - } - }); - } - const startTime = Date.now(); - - // get node running params - const params = getNodeRunParams(node); - - const dispatchData: ModuleDispatchProps> = { - ...this.data, - usagePush: this.usagePush.bind(this), - lastInteractive: this.data.lastInteractive?.entryNodeIds?.includes(node.nodeId) - ? this.data.lastInteractive - : undefined, - variables: this.data.variables, - histories: this.data.histories, - retainDatasetCite: this.data.retainDatasetCite, - node, - runtimeNodes: this.data.runtimeNodes, - runtimeEdges: this.data.runtimeEdges, - params, + const stepMetricAttributes = { + workflowId: this.data.runningAppInfo.id, + workflowName: this.data.runningAppInfo.name, + nodeId: node.nodeId, + nodeName: node.name, + nodeType: node.flowNodeType, mode: this.isDebugMode ? 
'test' : this.data.mode }; - // run module - const dispatchRes: NodeResponseType = await (async () => { - if (callbackMap[node.flowNodeType]) { - const targetEdges = this.edgeIndex.bySource.get(node.nodeId) || []; - const errorHandleId = getHandleId(node.nodeId, 'source_catch', 'right'); - - try { - const result = (await callbackMap[node.flowNodeType](dispatchData)) as NodeResponseType; - - if (result.error) { - // Run error and not catch error, skip all edges - if (!node.catchError) { - return { - ...result, - [DispatchNodeResponseKeyEnum.skipHandleId]: targetEdges.map( - (item) => item.sourceHandle - ) - }; + return observeWorkflowStep(stepMetricAttributes, () => + withActiveSpan( + { + name: `workflow.step ${node.name || node.nodeId}`, + tracerName: 'fastgpt.workflow', + attributes: { + 'fastgpt.workflow.id': this.data.runningAppInfo.id, + 'fastgpt.workflow.name': this.data.runningAppInfo.name, + 'fastgpt.workflow.node.id': node.nodeId, + 'fastgpt.workflow.node.name': node.name, + 'fastgpt.workflow.node.type': node.flowNodeType, + 'fastgpt.workflow.mode': stepMetricAttributes.mode + } + }, + async (stepSpan) => { + /* Inject data into module input */ + const getNodeRunParams = (node: RuntimeNodeItemType) => { + if (node.flowNodeType === FlowNodeTypeEnum.pluginInput) { + // Format plugin input to object + return node.inputs.reduce>((acc, item) => { + acc[item.key] = valueTypeFormat(item.value, item.valueType); + return acc; + }, {}); } - // Catch error, skip unError handle - const skipHandleIds = targetEdges - .filter((item) => item.sourceHandle !== errorHandleId) - .map((item) => item.sourceHandle); + // Dynamic input need to store a key. + const dynamicInput = node.inputs.find( + (item) => item.renderTypeList[0] === FlowNodeInputTypeEnum.addInputParam + ); + const params: Record = dynamicInput + ? 
{ + [dynamicInput.key]: {} + } + : {}; - return { - ...result, - [DispatchNodeResponseKeyEnum.skipHandleId]: result[ - DispatchNodeResponseKeyEnum.skipHandleId - ] - ? [...result[DispatchNodeResponseKeyEnum.skipHandleId], ...skipHandleIds].filter( - Boolean - ) - : skipHandleIds + node.inputs.forEach((input) => { + // Special input, not format + if (input.key === dynamicInput?.key) return; + + // Skip some special key + if ( + [NodeInputKeyEnum.childrenNodeIdList, NodeInputKeyEnum.httpJsonBody].includes( + input.key as NodeInputKeyEnum + ) + ) { + params[input.key] = input.value; + return; + } + + // replace {{$xx.xx$}} and {{xx}} variables + let value = replaceEditorVariable({ + text: input.value, + nodes: this.data.runtimeNodes, + variables: this.data.variables + }); + + // replace reference variables + value = getReferenceVariableValue({ + value, + nodes: this.data.runtimeNodes, + variables: this.data.variables + }); + + // Dynamic input is stored in the dynamic key + if (input.canEdit && dynamicInput && params[dynamicInput.key]) { + params[dynamicInput.key][input.key] = valueTypeFormat(value, input.valueType); + } + params[input.key] = valueTypeFormat(value, input.valueType); + }); + + return params; + }; + + // push run status messages + if (node.showStatus && !this.data.isToolCall) { + this.data.workflowStreamResponse?.({ + event: SseResponseEventEnum.flowNodeStatus, + data: { + status: 'running', + name: node.name + } + }); + } + const startTime = Date.now(); + + // get node running params + const params = getNodeRunParams(node); + + const dispatchData: ModuleDispatchProps> = { + ...this.data, + usagePush: this.usagePush.bind(this), + lastInteractive: this.data.lastInteractive?.entryNodeIds?.includes(node.nodeId) + ? 
this.data.lastInteractive + : undefined, + variables: this.data.variables, + histories: this.data.histories, + retainDatasetCite: this.data.retainDatasetCite, + node, + runtimeNodes: this.data.runtimeNodes, + runtimeEdges: this.data.runtimeEdges, + params, + mode: this.isDebugMode ? 'test' : this.data.mode + }; + + // run module + const dispatchRes: NodeResponseType = await (async () => { + if (callbackMap[node.flowNodeType]) { + const targetEdges = this.edgeIndex.bySource.get(node.nodeId) || []; + const errorHandleId = getHandleId(node.nodeId, 'source_catch', 'right'); + + try { + const result = (await callbackMap[node.flowNodeType]( + dispatchData + )) as NodeResponseType; + + if (result.error) { + // Run error and not catch error, skip all edges + if (!node.catchError) { + return { + ...result, + [DispatchNodeResponseKeyEnum.skipHandleId]: targetEdges.map( + (item) => item.sourceHandle + ) + }; + } + + // Catch error, skip unError handle + const skipHandleIds = targetEdges + .filter((item) => item.sourceHandle !== errorHandleId) + .map((item) => item.sourceHandle); + + return { + ...result, + [DispatchNodeResponseKeyEnum.skipHandleId]: result[ + DispatchNodeResponseKeyEnum.skipHandleId + ] + ? [ + ...result[DispatchNodeResponseKeyEnum.skipHandleId], + ...skipHandleIds + ].filter(Boolean) + : skipHandleIds + }; + } + + // Not error + const errorHandle = + targetEdges.find((item) => item.sourceHandle === errorHandleId)?.sourceHandle || + ''; + + return { + ...result, + [DispatchNodeResponseKeyEnum.skipHandleId]: (result[ + DispatchNodeResponseKeyEnum.skipHandleId + ] + ? 
[...result[DispatchNodeResponseKeyEnum.skipHandleId], errorHandle] + : [errorHandle] + ).filter(Boolean) + }; + } catch (error) { + // Skip all edges and return error + let skipHandleId = targetEdges.map((item) => item.sourceHandle); + if (node.catchError) { + skipHandleId = skipHandleId.filter((item) => item !== errorHandleId); + } + + return { + [DispatchNodeResponseKeyEnum.nodeResponse]: { + error: getErrText(error) + }, + [DispatchNodeResponseKeyEnum.skipHandleId]: skipHandleId + }; + } + } + return {}; + })(); + + const nodeResponses = dispatchRes[DispatchNodeResponseKeyEnum.nodeResponses] || []; + // format response data. Add modulename and module type + const formatResponseData: NodeResponseCompleteType['responseData'] = (() => { + if (!dispatchRes[DispatchNodeResponseKeyEnum.nodeResponse]) return undefined; + + const val = { + moduleName: node.name, + moduleType: node.flowNodeType, + moduleLogo: node.avatar, + ...dispatchRes[DispatchNodeResponseKeyEnum.nodeResponse], + id: getNanoid(), + nodeId: node.nodeId, + runningTime: +((Date.now() - startTime) / 1000).toFixed(2) + }; + nodeResponses.push(val); + return val; + })(); + + // Response node response + if ( + this.data.apiVersion === 'v2' && + !this.data.isToolCall && + this.isRootRuntime && + nodeResponses.length > 0 + ) { + const filteredResponses = this.data.responseAllData + ? 
nodeResponses + : filterPublicNodeResponseData({ + nodeRespones: nodeResponses, + responseDetail: this.data.responseDetail + }); + + filteredResponses.forEach((item) => { + this.data.workflowStreamResponse?.({ + event: SseResponseEventEnum.flowNodeResponse, + data: item + }); + }); + } + + // Add output default value + if (dispatchRes.data) { + node.outputs.forEach((item) => { + if (!item.required) return; + if (dispatchRes.data?.[item.key] !== undefined) return; + dispatchRes.data![item.key] = valueTypeFormat(item.defaultValue, item.valueType); + }); + } + + // Update new variables + if (dispatchRes[DispatchNodeResponseKeyEnum.newVariables]) { + this.data.variables = { + ...this.data.variables, + ...dispatchRes[DispatchNodeResponseKeyEnum.newVariables] }; } - // Not error - const errorHandle = - targetEdges.find((item) => item.sourceHandle === errorHandleId)?.sourceHandle || ''; + // Error + if (dispatchRes?.responseData?.error) { + stepSpan.setAttribute('fastgpt.workflow.step.error', true); + stepSpan.setStatus({ + code: SpanStatusCode.ERROR, + message: String(dispatchRes.responseData.error) + }); + logger.warn('Workflow node returned error', { error: dispatchRes.responseData.error }); + } else { + stepSpan.setStatus({ code: SpanStatusCode.OK }); + } - return { - ...result, - [DispatchNodeResponseKeyEnum.skipHandleId]: (result[ - DispatchNodeResponseKeyEnum.skipHandleId - ] - ? 
[...result[DispatchNodeResponseKeyEnum.skipHandleId], errorHandle] - : [errorHandle] - ).filter(Boolean) - }; - } catch (error) { - // Skip all edges and return error - let skipHandleId = targetEdges.map((item) => item.sourceHandle); - if (node.catchError) { - skipHandleId = skipHandleId.filter((item) => item !== errorHandleId); + if (formatResponseData?.runningTime !== undefined) { + stepSpan.setAttribute( + 'fastgpt.workflow.step.running_time_seconds', + formatResponseData.runningTime + ); } return { - [DispatchNodeResponseKeyEnum.nodeResponse]: { - error: getErrText(error) - }, - [DispatchNodeResponseKeyEnum.skipHandleId]: skipHandleId + node, + runStatus: 'run', + result: { + ...dispatchRes, + [DispatchNodeResponseKeyEnum.nodeResponse]: formatResponseData + } }; } - } - return {}; - })(); - - const nodeResponses = dispatchRes[DispatchNodeResponseKeyEnum.nodeResponses] || []; - // format response data. Add modulename and module type - const formatResponseData: NodeResponseCompleteType['responseData'] = (() => { - if (!dispatchRes[DispatchNodeResponseKeyEnum.nodeResponse]) return undefined; - - const val = { - moduleName: node.name, - moduleType: node.flowNodeType, - moduleLogo: node.avatar, - ...dispatchRes[DispatchNodeResponseKeyEnum.nodeResponse], - id: getNanoid(), - nodeId: node.nodeId, - runningTime: +((Date.now() - startTime) / 1000).toFixed(2) - }; - nodeResponses.push(val); - return val; - })(); - - // Response node response - if ( - this.data.apiVersion === 'v2' && - !this.data.isToolCall && - this.isRootRuntime && - nodeResponses.length > 0 - ) { - const filteredResponses = this.data.responseAllData - ? 
nodeResponses - : filterPublicNodeResponseData({ - nodeRespones: nodeResponses, - responseDetail: this.data.responseDetail - }); - - filteredResponses.forEach((item) => { - this.data.workflowStreamResponse?.({ - event: SseResponseEventEnum.flowNodeResponse, - data: item - }); - }); - } - - // Add output default value - if (dispatchRes.data) { - node.outputs.forEach((item) => { - if (!item.required) return; - if (dispatchRes.data?.[item.key] !== undefined) return; - dispatchRes.data![item.key] = valueTypeFormat(item.defaultValue, item.valueType); - }); - } - - // Update new variables - if (dispatchRes[DispatchNodeResponseKeyEnum.newVariables]) { - this.data.variables = { - ...this.data.variables, - ...dispatchRes[DispatchNodeResponseKeyEnum.newVariables] - }; - } - - // Error - if (dispatchRes?.responseData?.error) { - logger.warn('Workflow node returned error', { error: dispatchRes.responseData.error }); - } - - return { - node, - runStatus: 'run', - result: { - ...dispatchRes, - [DispatchNodeResponseKeyEnum.nodeResponse]: formatResponseData - } - }; + ) + ); } private nodeRunWithSkip(node: RuntimeNodeItemType): { node: RuntimeNodeItemType; @@ -1342,6 +1390,7 @@ export const runWorkflow = async (data: RunWorkflowProps): Promise 20) { return { flowResponses: [], @@ -1365,95 +1414,127 @@ export const runWorkflow = async (data: RunWorkflowProps): Promise item.isEntry); - // Reset entry - runtimeNodes.forEach((item) => { - // Interactively nodes will use the "isEntry", which does not need to be updated - if ( - item.flowNodeType !== FlowNodeTypeEnum.userSelect && - item.flowNodeType !== FlowNodeTypeEnum.formInput && - item.flowNodeType !== FlowNodeTypeEnum.toolCall - ) { - item.isEntry = false; - } - }); - - const workflowQueue = await new Promise((resolve) => { - logger.info('Workflow run start', { - maxRunTimes: data.maxRunTimes, - appId: data.runningAppInfo.id - }); - const workflowQueue = new WorkflowQueue({ - data, - resolve, - defaultSkipNodeQueue: 
data.lastInteractive?.skipNodeQueue || data.defaultSkipNodeQueue - }); - - entryNodes.forEach((node) => { - workflowQueue.addActiveNode(node.nodeId); - }); - }); - - // Get interactive node response. - const interactiveResult = (() => { - if (workflowQueue.nodeInteractiveResponse) { - const interactiveAssistant = workflowQueue.handleInteractiveResult({ - entryNodeIds: workflowQueue.nodeInteractiveResponse.entryNodeIds, - interactiveResponse: workflowQueue.nodeInteractiveResponse.interactiveResponse - }); - if (workflowQueue.isRootRuntime) { - workflowQueue.chatAssistantResponse.push(interactiveAssistant); + return withActiveSpan( + { + name: isRootRuntime ? 'workflow.run' : 'workflow.child.run', + tracerName: 'fastgpt.workflow', + attributes: { + 'fastgpt.workflow.id': data.runningAppInfo.id, + 'fastgpt.workflow.name': data.runningAppInfo.name, + 'fastgpt.workflow.mode': data.mode, + 'fastgpt.workflow.depth': data.workflowDispatchDeep, + 'fastgpt.workflow.is_root': isRootRuntime, + 'fastgpt.workflow.chat_id': data.chatId, + 'fastgpt.workflow.app_version': data.apiVersion, + 'fastgpt.workflow.is_tool_call': !!data.isToolCall, + 'fastgpt.workflow.node_count': runtimeNodes.length, + 'fastgpt.workflow.edge_count': runtimeEdges.length } - return interactiveAssistant.interactive; + }, + async (workflowSpan) => { + const startTime = Date.now(); + + await rewriteRuntimeWorkFlow({ nodes: runtimeNodes, edges: runtimeEdges, lang: data.lang }); + // Init default value + data.retainDatasetCite = data.retainDatasetCite ?? true; + data.responseDetail = data.responseDetail ?? true; + data.responseAllData = data.responseAllData ?? 
true; + + // Start process width initInput + const entryNodes = runtimeNodes.filter((item) => item.isEntry); + // Reset entry + runtimeNodes.forEach((item) => { + // Interactively nodes will use the "isEntry", which does not need to be updated + if ( + item.flowNodeType !== FlowNodeTypeEnum.userSelect && + item.flowNodeType !== FlowNodeTypeEnum.formInput && + item.flowNodeType !== FlowNodeTypeEnum.toolCall + ) { + item.isEntry = false; + } + }); + + const workflowQueue = await new Promise((resolve) => { + logger.info('Workflow run start', { + maxRunTimes: data.maxRunTimes, + appId: data.runningAppInfo.id + }); + const workflowQueue = new WorkflowQueue({ + data, + resolve, + defaultSkipNodeQueue: data.lastInteractive?.skipNodeQueue || data.defaultSkipNodeQueue + }); + + entryNodes.forEach((node) => { + workflowQueue.addActiveNode(node.nodeId); + }); + }); + + // Get interactive node response. + const interactiveResult = (() => { + if (workflowQueue.nodeInteractiveResponse) { + const interactiveAssistant = workflowQueue.handleInteractiveResult({ + entryNodeIds: workflowQueue.nodeInteractiveResponse.entryNodeIds, + interactiveResponse: workflowQueue.nodeInteractiveResponse.interactiveResponse + }); + if (workflowQueue.isRootRuntime) { + workflowQueue.chatAssistantResponse.push(interactiveAssistant); + } + return interactiveAssistant.interactive; + } + })(); + + const durationSeconds = +((Date.now() - startTime) / 1000).toFixed(2); + + workflowSpan.setAttribute('fastgpt.workflow.duration_seconds', durationSeconds); + workflowSpan.setAttribute('fastgpt.workflow.run_times', workflowQueue.workflowRunTimes); + workflowSpan.setAttribute( + 'fastgpt.workflow.has_interactive_response', + !!workflowQueue.nodeInteractiveResponse + ); + workflowSpan.setStatus({ code: SpanStatusCode.OK }); + + if (isRootRuntime) { + data.workflowStreamResponse?.({ + event: SseResponseEventEnum.workflowDuration, + data: { durationSeconds } + }); + } + + return { + flowResponses: 
workflowQueue.chatResponses, + flowUsages: workflowQueue.chatNodeUsages, + debugResponse: workflowQueue.getDebugResponse(), + workflowInteractiveResponse: interactiveResult, + [DispatchNodeResponseKeyEnum.runTimes]: workflowQueue.workflowRunTimes, + [DispatchNodeResponseKeyEnum.assistantResponses]: mergeAssistantResponseAnswerText( + workflowQueue.chatAssistantResponse + ), + [DispatchNodeResponseKeyEnum.toolResponses]: workflowQueue.toolRunResponse, + [DispatchNodeResponseKeyEnum.newVariables]: runtimeSystemVar2StoreType({ + variables, + removeObj: externalProvider.externalWorkflowVariables, + userVariablesConfigs: data.chatConfig?.variables + }), + [DispatchNodeResponseKeyEnum.memories]: + Object.keys(workflowQueue.system_memories).length > 0 + ? workflowQueue.system_memories + : undefined, + [DispatchNodeResponseKeyEnum.customFeedbacks]: + workflowQueue.customFeedbackList.length > 0 + ? workflowQueue.customFeedbackList + : undefined, + durationSeconds + }; } - })(); - - const durationSeconds = +((Date.now() - startTime) / 1000).toFixed(2); - - if (workflowQueue.isRootRuntime) { - data.workflowStreamResponse?.({ - event: SseResponseEventEnum.workflowDuration, - data: { durationSeconds } - }); - } - - return { - flowResponses: workflowQueue.chatResponses, - flowUsages: workflowQueue.chatNodeUsages, - debugResponse: workflowQueue.getDebugResponse(), - workflowInteractiveResponse: interactiveResult, - [DispatchNodeResponseKeyEnum.runTimes]: workflowQueue.workflowRunTimes, - [DispatchNodeResponseKeyEnum.assistantResponses]: mergeAssistantResponseAnswerText( - workflowQueue.chatAssistantResponse - ), - [DispatchNodeResponseKeyEnum.toolResponses]: workflowQueue.toolRunResponse, - [DispatchNodeResponseKeyEnum.newVariables]: runtimeSystemVar2StoreType({ - variables, - removeObj: externalProvider.externalWorkflowVariables, - userVariablesConfigs: data.chatConfig?.variables - }), - [DispatchNodeResponseKeyEnum.memories]: - Object.keys(workflowQueue.system_memories).length 
> 0 - ? workflowQueue.system_memories - : undefined, - [DispatchNodeResponseKeyEnum.customFeedbacks]: - workflowQueue.customFeedbackList.length > 0 ? workflowQueue.customFeedbackList : undefined, - durationSeconds - }; + ); }; /* get system variable */ diff --git a/packages/service/core/workflow/metrics.ts b/packages/service/core/workflow/metrics.ts new file mode 100644 index 0000000000..c9898d72d3 --- /dev/null +++ b/packages/service/core/workflow/metrics.ts @@ -0,0 +1,241 @@ +import { getMeter } from '../../common/metrics'; + +type MetricAttributeValue = string | number | boolean; +type MetricAttributes = Record; + +export type WorkflowStepMetricAttributes = { + workflowId?: string; + workflowName?: string; + nodeId: string; + nodeName?: string; + nodeType: string; + mode?: string; +}; + +type ProcessSnapshot = { + rss: number; + heapUsed: number; + external: number; + arrayBuffers: number; + cpuUser: number; + cpuSystem: number; +}; + +type StepObservationState = { + startedAt: bigint; + startSnapshot: ProcessSnapshot; + hadOverlapAtStart: boolean; + overlapVersionAtStart: number; +}; + +function normalizeAttributes(attributes: Record): MetricAttributes { + const normalized: MetricAttributes = {}; + + Object.entries(attributes).forEach(([key, value]) => { + if (value === undefined || value === null) return; + + if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') { + normalized[key] = value; + } + }); + + return normalized; +} + +function toMetricAttributes( + attributes: WorkflowStepMetricAttributes, + extras?: Record +) { + return normalizeAttributes({ + workflow_id: attributes.workflowId, + workflow_name: attributes.workflowName, + node_id: attributes.nodeId, + node_name: attributes.nodeName, + node_type: attributes.nodeType, + mode: attributes.mode, + ...extras + }); +} + +function takeProcessSnapshot(): ProcessSnapshot { + const memory = process.memoryUsage(); + const cpu = process.cpuUsage(); + + return { + rss: 
memory.rss, + heapUsed: memory.heapUsed, + external: memory.external, + arrayBuffers: memory.arrayBuffers, + cpuUser: cpu.user, + cpuSystem: cpu.system + }; +} + +let activeWorkflowStepCount = 0; +let overlapVersion = 0; + +function beginStepObservation(): StepObservationState { + const state: StepObservationState = { + startedAt: process.hrtime.bigint(), + startSnapshot: takeProcessSnapshot(), + hadOverlapAtStart: activeWorkflowStepCount > 0, + overlapVersionAtStart: overlapVersion + }; + + activeWorkflowStepCount += 1; + + if (activeWorkflowStepCount > 1) { + overlapVersion += 1; + } + + return state; +} + +const meter = getMeter('fastgpt.workflow'); +const prefix = 'fastgpt.workflow'; + +const stepDuration = meter.createHistogram(`${prefix}.step.duration`, { + description: 'Workflow step execution duration', + unit: 'ms' +}); +const stepExecutions = meter.createCounter(`${prefix}.step.executions`, { + description: 'Workflow step execution count' +}); +const stepActive = meter.createUpDownCounter(`${prefix}.step.active`, { + description: 'Workflow steps currently executing' +}); +const stepCpuUserTime = meter.createHistogram(`${prefix}.step.cpu.user_time`, { + description: 'Workflow step user CPU time', + unit: 'us' +}); +const stepCpuSystemTime = meter.createHistogram(`${prefix}.step.cpu.system_time`, { + description: 'Workflow step system CPU time', + unit: 'us' +}); +const stepMemoryRssStart = meter.createHistogram(`${prefix}.step.memory.rss_start`, { + description: 'Workflow process RSS memory snapshot at step start', + unit: 'By' +}); +const stepMemoryHeapUsedStart = meter.createHistogram(`${prefix}.step.memory.heap_used_start`, { + description: 'Workflow process heap used memory snapshot at step start', + unit: 'By' +}); +const stepMemoryExternalStart = meter.createHistogram(`${prefix}.step.memory.external_start`, { + description: 'Workflow process external memory snapshot at step start', + unit: 'By' +}); +const stepMemoryArrayBuffersStart = 
meter.createHistogram( + `${prefix}.step.memory.array_buffers_start`, + { + description: 'Workflow process array buffer memory snapshot at step start', + unit: 'By' + } +); +const stepMemoryRss = meter.createHistogram(`${prefix}.step.memory.rss`, { + description: 'Workflow process RSS memory snapshot at step end', + unit: 'By' +}); +const stepMemoryHeapUsed = meter.createHistogram(`${prefix}.step.memory.heap_used`, { + description: 'Workflow process heap used memory snapshot at step end', + unit: 'By' +}); +const stepMemoryExternal = meter.createHistogram(`${prefix}.step.memory.external`, { + description: 'Workflow process external memory snapshot at step end', + unit: 'By' +}); +const stepMemoryArrayBuffers = meter.createHistogram(`${prefix}.step.memory.array_buffers`, { + description: 'Workflow process array buffer memory snapshot at step end', + unit: 'By' +}); +const stepMemoryRssGrowth = meter.createHistogram(`${prefix}.step.memory.rss_growth`, { + description: 'Workflow process RSS memory growth during non-overlapping step execution', + unit: 'By' +}); +const stepMemoryHeapUsedGrowth = meter.createHistogram(`${prefix}.step.memory.heap_used_growth`, { + description: 'Workflow process heap used memory growth during non-overlapping step execution', + unit: 'By' +}); +const stepMemoryExternalGrowth = meter.createHistogram(`${prefix}.step.memory.external_growth`, { + description: 'Workflow process external memory growth during non-overlapping step execution', + unit: 'By' +}); + +export async function observeWorkflowStep( + attributes: WorkflowStepMetricAttributes, + fn: () => Promise | T +): Promise { + const observationState = beginStepObservation(); + const baseAttributes = toMetricAttributes(attributes); + + stepActive.add(1, baseAttributes); + + try { + const result = await fn(); + recordWorkflowStepEnd(attributes, observationState, 'ok', baseAttributes); + return result; + } catch (error) { + recordWorkflowStepEnd(attributes, observationState, 'error', 
baseAttributes); + throw error; + } +} + +function recordWorkflowStepEnd( + attributes: WorkflowStepMetricAttributes, + observationState: StepObservationState, + status: 'ok' | 'error', + baseAttributes: MetricAttributes +) { + const endSnapshot = takeProcessSnapshot(); + const metricAttributes = toMetricAttributes(attributes, { status }); + const stepOverlap = + observationState.hadOverlapAtStart || observationState.overlapVersionAtStart !== overlapVersion; + const memoryAttributes = toMetricAttributes(attributes, { + status, + memory_scope: 'process', + memory_attribution: stepOverlap ? 'best_effort' : 'exclusive', + step_overlap: stepOverlap + }); + const durationMs = Number(process.hrtime.bigint() - observationState.startedAt) / 1_000_000; + + stepDuration.record(durationMs, metricAttributes); + stepExecutions.add(1, metricAttributes); + stepCpuUserTime.record( + Math.max(0, endSnapshot.cpuUser - observationState.startSnapshot.cpuUser), + metricAttributes + ); + stepCpuSystemTime.record( + Math.max(0, endSnapshot.cpuSystem - observationState.startSnapshot.cpuSystem), + metricAttributes + ); + + stepMemoryRssStart.record(observationState.startSnapshot.rss, memoryAttributes); + stepMemoryHeapUsedStart.record(observationState.startSnapshot.heapUsed, memoryAttributes); + stepMemoryExternalStart.record(observationState.startSnapshot.external, memoryAttributes); + stepMemoryArrayBuffersStart.record(observationState.startSnapshot.arrayBuffers, memoryAttributes); + stepMemoryRss.record(endSnapshot.rss, memoryAttributes); + stepMemoryHeapUsed.record(endSnapshot.heapUsed, memoryAttributes); + stepMemoryExternal.record(endSnapshot.external, memoryAttributes); + stepMemoryArrayBuffers.record(endSnapshot.arrayBuffers, memoryAttributes); + + if (!stepOverlap && endSnapshot.rss > observationState.startSnapshot.rss) { + stepMemoryRssGrowth.record( + endSnapshot.rss - observationState.startSnapshot.rss, + memoryAttributes + ); + } + if (!stepOverlap && endSnapshot.heapUsed > 
observationState.startSnapshot.heapUsed) { + stepMemoryHeapUsedGrowth.record( + endSnapshot.heapUsed - observationState.startSnapshot.heapUsed, + memoryAttributes + ); + } + if (!stepOverlap && endSnapshot.external > observationState.startSnapshot.external) { + stepMemoryExternalGrowth.record( + endSnapshot.external - observationState.startSnapshot.external, + memoryAttributes + ); + } + + activeWorkflowStepCount = Math.max(0, activeWorkflowStepCount - 1); + stepActive.add(-1, baseAttributes); +} diff --git a/packages/service/env.ts b/packages/service/env.ts index 871e79bec0..d57f558e53 100644 --- a/packages/service/env.ts +++ b/packages/service/env.ts @@ -22,7 +22,17 @@ export const env = createEnv({ LOG_ENABLE_OTEL: BoolSchema.default(false), LOG_OTEL_LEVEL: LogLevelSchema.default('info'), LOG_OTEL_SERVICE_NAME: z.string().default('fastgpt-client'), - LOG_OTEL_URL: z.string().url().optional() + LOG_OTEL_URL: z.url().optional(), + + METRICS_ENABLE_OTEL: BoolSchema.default(false), + METRICS_EXPORT_INTERVAL: z.coerce.number().int().positive().default(15000), + METRICS_OTEL_SERVICE_NAME: z.string().default('fastgpt-client'), + METRICS_OTEL_URL: z.url().optional(), + + TRACING_ENABLE_OTEL: BoolSchema.default(false), + TRACING_OTEL_SERVICE_NAME: z.string().default('fastgpt-client'), + TRACING_OTEL_URL: z.url().optional(), + TRACING_OTEL_SAMPLE_RATIO: z.coerce.number().min(0).max(1).optional() }, emptyStringAsUndefined: true, runtimeEnv: process.env, diff --git a/packages/service/package.json b/packages/service/package.json index 00ebf8c431..a891d84059 100644 --- a/packages/service/package.json +++ b/packages/service/package.json @@ -8,13 +8,14 @@ }, "dependencies": { "@apidevtools/json-schema-ref-parser": "^11.7.2", - "@fastgpt-sdk/sandbox-adapter": "^0.0.22", + "@fastgpt-sdk/sandbox-adapter": "^0.0.27", + "@fastgpt-sdk/otel": "catalog:", "@fastgpt-sdk/storage": "catalog:", - "@fastgpt-sdk/logger": "catalog:", "@fastgpt/global": "workspace:*", "@maxmind/geoip2-node": 
"^6.3.4", "@modelcontextprotocol/sdk": "catalog:", "@node-rs/jieba": "2.0.1", + "@opentelemetry/api": "^1.9.0", "@t3-oss/env-core": "0.13.10", "@xmldom/xmldom": "^0.8.10", "@zilliz/milvus2-sdk-node": "2.4.10", @@ -36,8 +37,8 @@ "ioredis": "^5.6.0", "joplin-turndown-plugin-gfm": "^1.0.12", "json5": "catalog:", - "jsonrepair": "^3.0.0", "jsonpath-plus": "^10.3.0", + "jsonrepair": "^3.0.0", "jsonwebtoken": "^9.0.2", "lodash": "catalog:", "mammoth": "^1.11.0", diff --git a/packages/web/components/common/Textarea/PromptEditor/Editor.tsx b/packages/web/components/common/Textarea/PromptEditor/Editor.tsx index 10c2fe553a..f0409c19ab 100644 --- a/packages/web/components/common/Textarea/PromptEditor/Editor.tsx +++ b/packages/web/components/common/Textarea/PromptEditor/Editor.tsx @@ -7,7 +7,7 @@ */ import type { CSSProperties } from 'react'; -import { useEffect, useMemo, useState, useTransition } from 'react'; +import { useEffect, useMemo, useState, useTransition, useRef } from 'react'; import { LexicalComposer } from '@lexical/react/LexicalComposer'; import { PlainTextPlugin } from '@lexical/react/LexicalPlainTextPlugin'; import { RichTextPlugin } from '@lexical/react/LexicalRichTextPlugin'; @@ -33,7 +33,7 @@ import type { FormPropsType } from './type'; import { type EditorVariableLabelPickerType, type EditorVariablePickerType } from './type'; import { getNanoid } from '@fastgpt/global/common/string/tools'; import FocusPlugin from './plugins/FocusPlugin'; -import { textToEditorState } from './utils'; +import { textToEditorState, editorStateToText } from './utils'; import { MaxLengthPlugin } from './plugins/MaxLengthPlugin'; import { VariableLabelNode } from './plugins/VariableLabelPlugin/node'; import VariableLabelPlugin from './plugins/VariableLabelPlugin'; @@ -145,6 +145,7 @@ export default function Editor({ const [_, startSts] = useTransition(); const [focus, setFocus] = useState(false); const [scrollHeight, setScrollHeight] = useState(0); + const editorOutputRef = 
useRef(value); const initialConfig = { namespace: isRichText ? 'richPromptEditor' : 'promptEditor', @@ -164,7 +165,7 @@ export default function Editor({ }; useDeepCompareEffect(() => { - if (focus) return; + if (focus && value === editorOutputRef.current) return; setKey(getNanoid(6)); }, [value, variables, variableLabels, skillOption, selectedSkills]); @@ -256,6 +257,7 @@ export default function Editor({ { + editorOutputRef.current = editorStateToText(editor); const rootElement = editor.getRootElement(); setScrollHeight(rootElement?.scrollHeight || 0); startSts(() => { diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 605550c25a..20972ce69e 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -33,6 +33,9 @@ catalogs: '@fastgpt-sdk/logger': specifier: 0.1.2 version: 0.1.2 + '@fastgpt-sdk/otel': + specifier: 0.1.0 + version: 0.1.0 '@fastgpt-sdk/storage': specifier: 0.6.15 version: 0.6.15 @@ -106,8 +109,8 @@ catalogs: specifier: 14.1.2 version: 14.1.2 tsdown: - specifier: ^0.21.0 - version: 0.21.0 + specifier: 0.21.4 + version: 0.21.4 typescript: specifier: ^5.9.3 version: 5.9.3 @@ -154,7 +157,7 @@ importers: version: 10.1.4(socks@2.8.4) next-i18next: specifier: 'catalog:' - version: 15.4.2(i18next@23.16.8)(next@16.1.6(@babel/core@7.26.10)(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react-i18next@14.1.2(i18next@23.16.8)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1) + version: 15.4.2(i18next@23.16.8)(next@16.1.6(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react-i18next@14.1.2(i18next@23.16.8)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1) prettier: specifier: 3.2.4 version: 3.2.4 @@ -243,12 +246,12 @@ importers: '@apidevtools/json-schema-ref-parser': specifier: ^11.7.2 version: 11.7.2 - '@fastgpt-sdk/logger': + '@fastgpt-sdk/otel': specifier: 'catalog:' - version: 0.1.2 + version: 0.1.0 '@fastgpt-sdk/sandbox-adapter': - specifier: ^0.0.22 - version: 0.0.22 + 
specifier: ^0.0.27 + version: 0.0.27 '@fastgpt-sdk/storage': specifier: 'catalog:' version: 0.6.15(@opentelemetry/api@1.9.0)(@types/node@24.0.13)(jiti@2.6.0)(lightningcss@1.30.1)(proxy-agent@6.5.0)(sass@1.85.1)(terser@5.39.0)(tsx@4.20.6)(yaml@2.8.1) @@ -264,6 +267,9 @@ importers: '@node-rs/jieba': specifier: 2.0.1 version: 2.0.1 + '@opentelemetry/api': + specifier: ^1.9.0 + version: 1.9.0 '@t3-oss/env-core': specifier: 0.13.10 version: 0.13.10(typescript@5.9.3)(zod@4.1.12) @@ -465,7 +471,7 @@ importers: version: 2.1.1(@chakra-ui/system@2.6.1(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(react@18.3.1))(react@18.3.1) '@chakra-ui/next-js': specifier: 'catalog:' - version: 2.4.2(@chakra-ui/react@2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(next@16.1.6(@babel/core@7.26.10)(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react@18.3.1) + version: 2.4.2(@chakra-ui/react@2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(next@16.1.6(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react@18.3.1) '@chakra-ui/react': specifier: 'catalog:' version: 
2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) @@ -546,7 +552,7 @@ importers: version: 16.1.6(@babel/core@7.26.10)(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1) next-i18next: specifier: 'catalog:' - version: 15.4.2(i18next@23.16.8)(next@16.1.6(@babel/core@7.26.10)(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react-i18next@14.1.2(i18next@23.16.8)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1) + version: 15.4.2(i18next@23.16.8)(next@16.1.6(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react-i18next@14.1.2(i18next@23.16.8)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1) papaparse: specifier: ^5.4.1 version: 5.4.1 @@ -619,7 +625,7 @@ importers: version: 2.1.1(@chakra-ui/system@2.6.1(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(react@18.3.1))(react@18.3.1) '@chakra-ui/next-js': specifier: 'catalog:' - version: 2.4.2(@chakra-ui/react@2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(next@16.1.6(@babel/core@7.26.10)(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react@18.3.1) + version: 
2.4.2(@chakra-ui/react@2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(next@16.1.6(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react@18.3.1) '@chakra-ui/react': specifier: 'catalog:' version: 2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) @@ -736,7 +742,7 @@ importers: version: 16.1.6(@babel/core@7.26.10)(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1) next-i18next: specifier: 'catalog:' - version: 15.4.2(i18next@23.16.8)(next@16.1.6(@babel/core@7.26.10)(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react-i18next@14.1.2(i18next@23.16.8)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1) + version: 15.4.2(i18next@23.16.8)(next@16.1.6(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react-i18next@14.1.2(i18next@23.16.8)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1) nprogress: specifier: ^0.2.0 version: 0.2.0 @@ -881,7 +887,7 @@ importers: version: 2.1.1(@chakra-ui/system@2.6.1(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(react@18.3.1))(react@18.3.1) '@chakra-ui/next-js': specifier: 'catalog:' - version: 
2.4.2(@chakra-ui/react@2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(next@16.1.6(@babel/core@7.26.10)(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react@18.3.1) + version: 2.4.2(@chakra-ui/react@2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(next@16.1.6(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react@18.3.1) '@chakra-ui/react': specifier: 'catalog:' version: 2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) @@ -917,7 +923,7 @@ importers: version: 16.1.6(@babel/core@7.26.10)(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1) next-i18next: specifier: 'catalog:' - version: 15.4.2(i18next@23.16.8)(next@16.1.6(@babel/core@7.26.10)(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react-i18next@14.1.2(i18next@23.16.8)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1) + version: 
15.4.2(i18next@23.16.8)(next@16.1.6(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react-i18next@14.1.2(i18next@23.16.8)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1) react: specifier: 'catalog:' version: 18.3.1 @@ -1083,7 +1089,59 @@ importers: version: 20.17.24 tsdown: specifier: 'catalog:' - version: 0.21.0(typescript@5.9.3) + version: 0.21.4(typescript@5.9.3) + typescript: + specifier: 'catalog:' + version: 5.9.3 + + sdk/otel: + dependencies: + '@logtape/logtape': + specifier: ^2 + version: 2.0.2 + '@logtape/pretty': + specifier: ^2 + version: 2.0.2(@logtape/logtape@2.0.2) + '@opentelemetry/api': + specifier: ^1.9.0 + version: 1.9.0 + '@opentelemetry/api-logs': + specifier: ^0.203.0 + version: 0.203.0 + '@opentelemetry/exporter-logs-otlp-http': + specifier: ^0.203.0 + version: 0.203.0(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-metrics-otlp-http': + specifier: ^0.203.0 + version: 0.203.0(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-http': + specifier: ^0.203.0 + version: 0.203.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': + specifier: ^2.0.1 + version: 2.5.0(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-logs': + specifier: ^0.203.0 + version: 0.203.0(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-metrics': + specifier: ^2.0.1 + version: 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': + specifier: ^2.0.1 + version: 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-node': + specifier: ^2.0.1 + version: 2.6.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': + specifier: ^1.39.0 + version: 1.39.0 + devDependencies: + '@types/node': + specifier: 'catalog:' + version: 20.17.24 + tsdown: + specifier: 'catalog:' + version: 0.21.4(typescript@5.9.3) typescript: specifier: 'catalog:' version: 5.9.3 @@ -1123,7 +1181,7 @@ importers: version: 20.17.24 tsdown: specifier: 'catalog:' - version: 0.21.0(typescript@5.9.3) + version: 
0.21.4(typescript@5.9.3) typescript: specifier: 'catalog:' version: 5.9.3 @@ -2658,11 +2716,15 @@ packages: resolution: {integrity: sha512-nt1qCq7frcRiR+406vEERWC1vEPVIKPUGH/ZRP/mlBxvNJp1RycWQT8RhK7/tHmW6xPNZoRL/q2WfhM4Q+L7eg==} engines: {node: '>=20', pnpm: '>=9'} + '@fastgpt-sdk/otel@0.1.0': + resolution: {integrity: sha512-wpZUcpoU4u1/UxC8R0KU7spZ1Ku2FbGerp4eBeOCgCDaiJGUbm3P7d1D97wzrPIK3QUgWj2g8N6dS0vrlryp2Q==} + engines: {node: '>=20', pnpm: '>=9'} + '@fastgpt-sdk/plugin@0.3.8': resolution: {integrity: sha512-GjKrXMHxeF5UMkYGXawrUpzZjVRw3DICNYODeYwsUVOy+/ltu5zuwsqLkuuGQ7Arp/SBCmYRjG/MHmeNp4xxfw==} - '@fastgpt-sdk/sandbox-adapter@0.0.22': - resolution: {integrity: sha512-08SLX1F76Q178Gb0nFM5VQo/ms1Iwafx0MMSAb9Xoo7VTW/3apfP/qRxyvoCUZJsbETqYKz+qaEfLNNcDnUAHg==} + '@fastgpt-sdk/sandbox-adapter@0.0.27': + resolution: {integrity: sha512-DWUic1HoF9pyAAMpbwTN7Iturim+9M9mBjP0MRoi8n/Ynyh47z+ZivMEcHdZSBo9moRMIeE+UuZOjfjw27+4/w==} engines: {node: '>=18'} '@fastgpt-sdk/storage@0.6.15': @@ -3495,6 +3557,12 @@ packages: resolution: {integrity: sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==} engines: {node: '>=8.0.0'} + '@opentelemetry/context-async-hooks@2.6.0': + resolution: {integrity: sha512-L8UyDwqpTcbkIK5cgwDRDYDoEhQoj8wp8BwsO19w3LB1Z41yEQm2VJyNfAi9DrLP/YTqXqWpKHyZfR9/tFYo1Q==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + '@opentelemetry/core@2.0.1': resolution: {integrity: sha512-MaZk9SJIDgo1peKevlbhP6+IwIiNPNmswNL4AF0WaQJLbHXjr9SrZMgS12+iqr9ToV4ZVosCcc0f8Rg67LXjxw==} engines: {node: ^18.19.0 || >=20.6.0} @@ -3507,6 +3575,12 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.0.0 <1.10.0' + '@opentelemetry/core@2.6.0': + resolution: {integrity: sha512-HLM1v2cbZ4TgYN6KEOj+Bbj8rAKriOdkF9Ed3tG25FoprSiQl7kYc+RRT6fUZGOvx0oMi5U67GoFdT+XUn8zEg==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' + 
'@opentelemetry/exporter-logs-otlp-grpc@0.202.0': resolution: {integrity: sha512-Y84L8Yja/A2qjGEzC/To0yrMUXHrtwJzHtZ2za1/ulZplRe5QFsLNyHixIS42ZYUKuNyWMDgOFhnN2Pz5uThtg==} engines: {node: ^18.19.0 || >=20.6.0} @@ -3531,6 +3605,18 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 + '@opentelemetry/exporter-metrics-otlp-http@0.203.0': + resolution: {integrity: sha512-HFSW10y8lY6BTZecGNpV3GpoSy7eaO0Z6GATwZasnT4bEsILp8UJXNG5OmEsz4SdwCSYvyCbTJdNbZP3/8LGCQ==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + + '@opentelemetry/exporter-trace-otlp-http@0.203.0': + resolution: {integrity: sha512-ZDiaswNYo0yq/cy1bBLJFe691izEJ6IgNmkjm4C6kE9ub/OMQqDXORx2D2j8fzTBTxONyzusbaZlqtfmyqURPw==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': ^1.3.0 + '@opentelemetry/otlp-exporter-base@0.202.0': resolution: {integrity: sha512-nMEOzel+pUFYuBJg2znGmHJWbmvMbdX5/RhoKNKowguMbURhz0fwik5tUKplLcUtl8wKPL1y9zPnPxeBn65N0Q==} engines: {node: ^18.19.0 || >=20.6.0} @@ -3573,6 +3659,12 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.3.0 <1.10.0' + '@opentelemetry/resources@2.6.0': + resolution: {integrity: sha512-D4y/+OGe3JSuYUCBxtH5T9DSAWNcvCb/nQWIga8HNtXTVPQn59j0nTBAgaAXxUVBDl40mG3Tc76b46wPlZaiJQ==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.10.0' + '@opentelemetry/sdk-logs@0.202.0': resolution: {integrity: sha512-pv8QiQLQzk4X909YKm0lnW4hpuQg4zHwJ4XBd5bZiXcd9urvrJNoNVKnxGHPiDVX/GiLFvr5DMYsDBQbZCypRQ==} engines: {node: ^18.19.0 || >=20.6.0} @@ -3597,9 +3689,17 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.3.0 <1.10.0' - '@opentelemetry/semantic-conventions@1.36.0': - resolution: {integrity: sha512-TtxJSRD8Ohxp6bKkhrm27JRHAxPczQA7idtcTOMYI+wQRRrfgqxHv1cFbCApcSnNjtXkmzFozn6jQtFrOmbjPQ==} - engines: {node: '>=14'} + '@opentelemetry/sdk-trace-base@2.6.0': + resolution: {integrity: 
sha512-g/OZVkqlxllgFM7qMKqbPV9c1DUPhQ7d4n3pgZFcrnrNft9eJXZM2TNHTPYREJBrtNdRytYyvwjgL5geDKl3EQ==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.3.0 <1.10.0' + + '@opentelemetry/sdk-trace-node@2.6.0': + resolution: {integrity: sha512-YhswtasmsbIGEFvLGvR9p/y3PVRTfFf+mgY8van4Ygpnv4sA3vooAjvh+qAn9PNWxs4/IwGGqiQS0PPsaRJ0vQ==} + engines: {node: ^18.19.0 || >=20.6.0} + peerDependencies: + '@opentelemetry/api': '>=1.0.0 <1.10.0' '@opentelemetry/semantic-conventions@1.39.0': resolution: {integrity: sha512-R5R9tb2AXs2IRLNKLBJDynhkfmx7mX0vi8NkhZb3gUkPWHn6HXk5J8iQ/dql0U3ApfWym4kXXmBDRGO+oeOfjg==} @@ -3849,97 +3949,97 @@ packages: '@codemirror/state': ^6.0.0 '@codemirror/view': ^6.0.0 - '@rolldown/binding-android-arm64@1.0.0-rc.7': - resolution: {integrity: sha512-/uadfNUaMLFFBGvcIOiq8NnlhvTZTjOyybJaJnhGxD0n9k5vZRJfTaitH5GHnbwmc6T2PC+ZpS1FQH+vXyS/UA==} + '@rolldown/binding-android-arm64@1.0.0-rc.9': + resolution: {integrity: sha512-lcJL0bN5hpgJfSIz/8PIf02irmyL43P+j1pTCfbD1DbLkmGRuFIA4DD3B3ZOvGqG0XiVvRznbKtN0COQVaKUTg==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [android] - '@rolldown/binding-darwin-arm64@1.0.0-rc.7': - resolution: {integrity: sha512-zokYr1KgRn0hRA89dmgtPj/BmKp9DxgrfAJvOEFfXa8nfYWW2nmgiYIBGpSIAJrEg7Qc/Qznovy6xYwmKh0M8g==} + '@rolldown/binding-darwin-arm64@1.0.0-rc.9': + resolution: {integrity: sha512-J7Zk3kLYFsLtuH6U+F4pS2sYVzac0qkjcO5QxHS7OS7yZu2LRs+IXo+uvJ/mvpyUljDJ3LROZPoQfgBIpCMhdQ==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [darwin] - '@rolldown/binding-darwin-x64@1.0.0-rc.7': - resolution: {integrity: sha512-eZFjbmrapCBVgMmuLALH3pmQQQStHFuRhsFceJHk6KISW8CkI2e9OPLp9V4qXksrySQcD8XM8fpvGLs5l5C7LQ==} + '@rolldown/binding-darwin-x64@1.0.0-rc.9': + resolution: {integrity: sha512-iwtmmghy8nhfRGeNAIltcNXzD0QMNaaA5U/NyZc1Ia4bxrzFByNMDoppoC+hl7cDiUq5/1CnFthpT9n+UtfFyg==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [darwin] - '@rolldown/binding-freebsd-x64@1.0.0-rc.7': - resolution: 
{integrity: sha512-xjMrh8Dmu2DNwdY6DZsrF6YPGeesc3PaTlkh8v9cqmkSCNeTxnhX3ErhVnuv1j3n8t2IuuhQIwM9eZDINNEt5Q==} + '@rolldown/binding-freebsd-x64@1.0.0-rc.9': + resolution: {integrity: sha512-DLFYI78SCiZr5VvdEplsVC2Vx53lnA4/Ga5C65iyldMVaErr86aiqCoNBLl92PXPfDtUYjUh+xFFor40ueNs4Q==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [freebsd] - '@rolldown/binding-linux-arm-gnueabihf@1.0.0-rc.7': - resolution: {integrity: sha512-mOvftrHiXg4/xFdxJY3T9Wl1/zDAOSlMN8z9an2bXsCwuvv3RdyhYbSMZDuDO52S04w9z7+cBd90lvQSPTAQtw==} + '@rolldown/binding-linux-arm-gnueabihf@1.0.0-rc.9': + resolution: {integrity: sha512-CsjTmTwd0Hri6iTw/DRMK7kOZ7FwAkrO4h8YWKoX/kcj833e4coqo2wzIFywtch/8Eb5enQ/lwLM7w6JX1W5RQ==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm] os: [linux] - '@rolldown/binding-linux-arm64-gnu@1.0.0-rc.7': - resolution: {integrity: sha512-TuUkeuEEPRyXMBbJ86NRhAiPNezxHW8merl3Om2HASA9Pl1rI+VZcTtsVQ6v/P0MDIFpSl0k0+tUUze9HIXyEw==} + '@rolldown/binding-linux-arm64-gnu@1.0.0-rc.9': + resolution: {integrity: sha512-2x9O2JbSPxpxMDhP9Z74mahAStibTlrBMW0520+epJH5sac7/LwZW5Bmg/E6CXuEF53JJFW509uP+lSedaUNxg==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [linux] - '@rolldown/binding-linux-arm64-musl@1.0.0-rc.7': - resolution: {integrity: sha512-G43ZElEvaby+YSOgrXfBgpeQv42LdS0ivFFYQufk2tBDWeBfzE/+ob5DmO8Izbyn4Y8k6GgLF11jFDYNnmU/3w==} + '@rolldown/binding-linux-arm64-musl@1.0.0-rc.9': + resolution: {integrity: sha512-JA1QRW31ogheAIRhIg9tjMfsYbglXXYGNPLdPEYrwFxdbkQCAzvpSCSHCDWNl4hTtrol8WeboCSEpjdZK8qrCg==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [linux] - '@rolldown/binding-linux-ppc64-gnu@1.0.0-rc.7': - resolution: {integrity: sha512-Y48ShVxGE2zUTt0A0PR3grCLNxW4DWtAfe5lxf6L3uYEQujwo/LGuRogMsAtOJeYLCPTJo2i714LOdnK34cHpw==} + '@rolldown/binding-linux-ppc64-gnu@1.0.0-rc.9': + resolution: {integrity: sha512-aOKU9dJheda8Kj8Y3w9gnt9QFOO+qKPAl8SWd7JPHP+Cu0EuDAE5wokQubLzIDQWg2myXq2XhTpOVS07qqvT+w==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [ppc64] os: [linux] - 
'@rolldown/binding-linux-s390x-gnu@1.0.0-rc.7': - resolution: {integrity: sha512-KU5DUYvX3qI8/TX6D3RA4awXi4Ge/1+M6Jqv7kRiUndpqoVGgD765xhV3Q6QvtABnYjLJenrWDl3S1B5U56ixA==} + '@rolldown/binding-linux-s390x-gnu@1.0.0-rc.9': + resolution: {integrity: sha512-OalO94fqj7IWRn3VdXWty75jC5dk4C197AWEuMhIpvVv2lw9fiPhud0+bW2ctCxb3YoBZor71QHbY+9/WToadA==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [s390x] os: [linux] - '@rolldown/binding-linux-x64-gnu@1.0.0-rc.7': - resolution: {integrity: sha512-1THb6FdBkAEL12zvUue2bmK4W1+P+tz8Pgu5uEzq+xrtYa3iBzmmKNlyfUzCFNCqsPd8WJEQrYdLcw4iMW4AVw==} + '@rolldown/binding-linux-x64-gnu@1.0.0-rc.9': + resolution: {integrity: sha512-cVEl1vZtBsBZna3YMjGXNvnYYrOJ7RzuWvZU0ffvJUexWkukMaDuGhUXn0rjnV0ptzGVkvc+vW9Yqy6h8YX4pg==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [linux] - '@rolldown/binding-linux-x64-musl@1.0.0-rc.7': - resolution: {integrity: sha512-12o73atFNWDgYnLyA52QEUn9AH8pHIe12W28cmqjyHt4bIEYRzMICvYVCPa2IQm6DJBvCBrEhD9K+ct4wr2hwg==} + '@rolldown/binding-linux-x64-musl@1.0.0-rc.9': + resolution: {integrity: sha512-UzYnKCIIc4heAKgI4PZ3dfBGUZefGCJ1TPDuLHoCzgrMYPb5Rv6TLFuYtyM4rWyHM7hymNdsg5ik2C+UD9VDbA==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [linux] - '@rolldown/binding-openharmony-arm64@1.0.0-rc.7': - resolution: {integrity: sha512-+uUgGwvuUCXl894MTsmTS2J0BnCZccFsmzV7y1jFxW5pTSxkuwL5agyPuDvDOztPeS6RrdqWkn7sT0jRd0ECkg==} + '@rolldown/binding-openharmony-arm64@1.0.0-rc.9': + resolution: {integrity: sha512-+6zoiF+RRyf5cdlFQP7nm58mq7+/2PFaY2DNQeD4B87N36JzfF/l9mdBkkmTvSYcYPE8tMh/o3cRlsx1ldLfog==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [openharmony] - '@rolldown/binding-wasm32-wasi@1.0.0-rc.7': - resolution: {integrity: sha512-53p2L/NSy21UiFOqUGlC11kJDZS2Nx2GJRz1QvbkXovypA3cOHbsyZHLkV72JsLSbiEQe+kg4tndUhSiC31UEA==} + '@rolldown/binding-wasm32-wasi@1.0.0-rc.9': + resolution: {integrity: sha512-rgFN6sA/dyebil3YTlL2evvi/M+ivhfnyxec7AccTpRPccno/rPoNlqybEZQBkcbZu8Hy+eqNJCqfBR8P7Pg8g==} engines: {node: 
'>=14.0.0'} cpu: [wasm32] - '@rolldown/binding-win32-arm64-msvc@1.0.0-rc.7': - resolution: {integrity: sha512-K6svNRljO6QrL6VTKxwh4yThhlR9DT/tK0XpaFQMnJwwQKng+NYcVEtUkAM0WsoiZHw+Hnh3DGnn3taf/pNYGg==} + '@rolldown/binding-win32-arm64-msvc@1.0.0-rc.9': + resolution: {integrity: sha512-lHVNUG/8nlF1IQk1C0Ci574qKYyty2goMiPlRqkC5R+3LkXDkL5Dhx8ytbxq35m+pkHVIvIxviD+TWLdfeuadA==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [arm64] os: [win32] - '@rolldown/binding-win32-x64-msvc@1.0.0-rc.7': - resolution: {integrity: sha512-3ZJBT47VWLKVKIyvHhUSUgVwHzzZW761YAIkM3tOT+8ZTjFVp0acCM0Y2Z2j3jCl+XYi2d9y2uEWQ8H0PvvpPw==} + '@rolldown/binding-win32-x64-msvc@1.0.0-rc.9': + resolution: {integrity: sha512-G0oA4+w1iY5AGi5HcDTxWsoxF509hrFIPB2rduV5aDqS9FtDg1CAfa7V34qImbjfhIcA8C+RekocJZA96EarwQ==} engines: {node: ^20.19.0 || >=22.12.0} cpu: [x64] os: [win32] - '@rolldown/pluginutils@1.0.0-rc.7': - resolution: {integrity: sha512-qujRfC8sFVInYSPPMLQByRh7zhwkGFS4+tyMQ83srV1qrxL4g8E2tyxVVyxd0+8QeBM1mIk9KbWxkegRr76XzA==} + '@rolldown/pluginutils@1.0.0-rc.9': + resolution: {integrity: sha512-w6oiRWgEBl04QkFZgmW+jnU1EC9b57Oihi2ot3HNWIQRqgHp5PnYDia5iZ5FF7rpa4EQdiqMDXjlqKGXBhsoXw==} '@rollup/rollup-android-arm-eabi@4.35.0': resolution: {integrity: sha512-uYQ2WfPaqz5QtVgMxfN6NpLD+no0MYHDBywl7itPYd3K5TjjSghNKmX8ic9S8NU8w81NVhJv/XojcHptRly7qQ==} @@ -7385,8 +7485,8 @@ packages: hookable@5.5.3: resolution: {integrity: sha512-Yc+BQe8SvoXH1643Qez1zqLRmbA5rCL+sSmk6TVos0LWVfNIB7PGncdlId77WzLGSIB5KaWgTaNTs2lNVEI6VQ==} - hookable@6.0.1: - resolution: {integrity: sha512-uKGyY8BuzN/a5gvzvA+3FVWo0+wUjgtfSdnmjtrOVwQCZPHpHDH2WRO3VZSOeluYrHoDCiXFffZXs8Dj1ULWtw==} + hookable@6.1.0: + resolution: {integrity: sha512-ZoKZSJgu8voGK2geJS+6YtYjvIzu9AOM/KZXsBxr83uhLL++e9pEv/dlgwgy3dvHg06kTz6JOh1hk3C8Ceiymw==} html-escaper@2.0.2: resolution: {integrity: sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==} @@ -9949,8 +10049,8 @@ packages: robust-predicates@3.0.2: resolution: 
{integrity: sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==} - rolldown-plugin-dts@0.22.4: - resolution: {integrity: sha512-pueqTPyN1N6lWYivyDGad+j+GO3DT67pzpct8s8e6KGVIezvnrDjejuw1AXFeyDRas3xTq4Ja6Lj5R5/04C5GQ==} + rolldown-plugin-dts@0.22.5: + resolution: {integrity: sha512-M/HXfM4cboo+jONx9Z0X+CUf3B5tCi7ni+kR5fUW50Fp9AlZk0oVLesibGWgCXDKFp5lpgQ9yhKoImUFjl3VZw==} engines: {node: '>=20.19.0'} peerDependencies: '@ts-macro/tsc': ^0.3.6 @@ -9968,8 +10068,8 @@ packages: vue-tsc: optional: true - rolldown@1.0.0-rc.7: - resolution: {integrity: sha512-5X0zEeQFzDpB3MqUWQZyO2TUQqP9VnT7CqXHF2laTFRy487+b6QZyotCazOySAuZLAvplCaOVsg1tVn/Zlmwfg==} + rolldown@1.0.0-rc.9: + resolution: {integrity: sha512-9EbgWge7ZH+yqb4d2EnELAntgPTWbfL8ajiTW+SyhJEC4qhBbkCKbqFV4Ge4zmu5ziQuVbWxb/XwLZ+RIO7E8Q==} engines: {node: ^20.19.0 || >=22.12.0} hasBin: true @@ -10549,6 +10649,10 @@ packages: resolution: {integrity: sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==} engines: {node: '>=18'} + tinyexec@1.0.4: + resolution: {integrity: sha512-u9r3uZC0bdpGOXtlxUIdwf9pkmvhqJdrVCH9fapQtgy/OeTTMZ1nqH7agtvEfmGui6e1XxjcdrlxvxJvc3sMqw==} + engines: {node: '>=18'} + tinyglobby@0.2.15: resolution: {integrity: sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==} engines: {node: '>=12.0.0'} @@ -10657,14 +10761,14 @@ packages: tsconfig-paths@3.15.0: resolution: {integrity: sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==} - tsdown@0.21.0: - resolution: {integrity: sha512-Sw/ehzVhjYLD7HVBPybJHDxpcaeyFjPcaDCME23o9O4fyuEl6ibYEdrnB8W8UchYAGoayKqzWQqx/oIp3jn/Vg==} + tsdown@0.21.4: + resolution: {integrity: sha512-Q/kBi8SXkr4X6JI/NAZKZY1UuiEcbuXtIskL4tZCsgpDiEPM/2W6lC+OonNA31S+V3KsWedFvbFDBs23hvt+Aw==} engines: {node: '>=20.19.0'} hasBin: true peerDependencies: '@arethetypeswrong/core': ^0.18.1 - '@tsdown/css': 0.21.0 - '@tsdown/exe': 
0.21.0 + '@tsdown/css': 0.21.4 + '@tsdown/exe': 0.21.4 '@vitejs/devtools': '*' publint: ^0.3.0 typescript: ^5.0.0 @@ -10899,8 +11003,8 @@ packages: resolution: {integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==} engines: {node: '>= 0.8'} - unrun@0.2.30: - resolution: {integrity: sha512-a4W1wDADI0gvDDr14T0ho1FgMhmfjq6M8Iz8q234EnlxgH/9cMHDueUSLwTl1fwSBs5+mHrLFYH+7B8ao36EBA==} + unrun@0.2.32: + resolution: {integrity: sha512-opd3z6791rf281JdByf0RdRQrpcc7WyzqittqIXodM/5meNWdTwrVxeyzbaCp4/Rgls/um14oUaif1gomO8YGg==} engines: {node: '>=20.19.0'} hasBin: true peerDependencies: @@ -12907,7 +13011,7 @@ snapshots: '@chakra-ui/system': 2.6.1(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(react@18.3.1) react: 18.3.1 - '@chakra-ui/next-js@2.4.2(@chakra-ui/react@2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(next@16.1.6(@babel/core@7.26.10)(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react@18.3.1)': + '@chakra-ui/next-js@2.4.2(@chakra-ui/react@2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(next@16.1.6(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react@18.3.1)': 
dependencies: '@chakra-ui/react': 2.10.7(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@emotion/styled@11.11.0(@emotion/react@11.11.1(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(react@18.3.1))(@types/react@18.3.1)(framer-motion@9.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) '@emotion/cache': 11.14.0 @@ -13534,16 +13638,32 @@ snapshots: '@opentelemetry/api': 1.9.0 '@opentelemetry/api-logs': 0.203.0 '@opentelemetry/exporter-logs-otlp-http': 0.203.0(@opentelemetry/api@1.9.0) - '@opentelemetry/resources': 2.5.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 2.6.0(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-logs': 0.203.0(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.39.0 + '@fastgpt-sdk/otel@0.1.0': + dependencies: + '@logtape/logtape': 2.0.2 + '@logtape/pretty': 2.0.2(@logtape/logtape@2.0.2) + '@opentelemetry/api': 1.9.0 + '@opentelemetry/api-logs': 0.203.0 + '@opentelemetry/exporter-logs-otlp-http': 0.203.0(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-metrics-otlp-http': 0.203.0(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-http': 0.203.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 2.6.0(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-logs': 0.203.0(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-metrics': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 2.6.0(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-node': 2.6.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.39.0 + '@fastgpt-sdk/plugin@0.3.8': dependencies: '@fortaine/fetch-event-source': 3.0.6 zod: 4.1.12 - '@fastgpt-sdk/sandbox-adapter@0.0.22': + '@fastgpt-sdk/sandbox-adapter@0.0.27': dependencies: '@alibaba-group/opensandbox': 0.1.4 '@e2b/code-interpreter': 2.3.3 @@ -14403,6 +14523,10 @@ snapshots: '@opentelemetry/api@1.9.0': {} + '@opentelemetry/context-async-hooks@2.6.0(@opentelemetry/api@1.9.0)': + dependencies: 
+ '@opentelemetry/api': 1.9.0 + '@opentelemetry/core@2.0.1(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -14413,6 +14537,11 @@ snapshots: '@opentelemetry/api': 1.9.0 '@opentelemetry/semantic-conventions': 1.39.0 + '@opentelemetry/core@2.6.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/semantic-conventions': 1.39.0 + '@opentelemetry/exporter-logs-otlp-grpc@0.202.0(@opentelemetry/api@1.9.0)': dependencies: '@grpc/grpc-js': 1.13.0 @@ -14452,6 +14581,24 @@ snapshots: '@opentelemetry/sdk-logs': 0.202.0(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-trace-base': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-metrics-otlp-http@0.203.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.203.0(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.203.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-metrics': 2.0.1(@opentelemetry/api@1.9.0) + + '@opentelemetry/exporter-trace-otlp-http@0.203.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.203.0(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.203.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 2.0.1(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base@0.202.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -14498,7 +14645,7 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 2.0.1(@opentelemetry/api@1.9.0) - '@opentelemetry/semantic-conventions': 1.36.0 + '@opentelemetry/semantic-conventions': 1.39.0 '@opentelemetry/resources@2.5.0(@opentelemetry/api@1.9.0)': dependencies: @@ 
-14506,6 +14653,12 @@ snapshots: '@opentelemetry/core': 2.5.0(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.39.0 + '@opentelemetry/resources@2.6.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 2.6.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.39.0 + '@opentelemetry/sdk-logs@0.202.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -14533,7 +14686,19 @@ snapshots: '@opentelemetry/resources': 2.0.1(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.39.0 - '@opentelemetry/semantic-conventions@1.36.0': {} + '@opentelemetry/sdk-trace-base@2.6.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 2.6.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 2.6.0(@opentelemetry/api@1.9.0) + '@opentelemetry/semantic-conventions': 1.39.0 + + '@opentelemetry/sdk-trace-node@2.6.0(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/context-async-hooks': 2.6.0(@opentelemetry/api@1.9.0) + '@opentelemetry/core': 2.6.0(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 2.6.0(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions@1.39.0': {} @@ -14769,54 +14934,54 @@ snapshots: '@codemirror/state': 6.5.2 '@codemirror/view': 6.38.6 - '@rolldown/binding-android-arm64@1.0.0-rc.7': + '@rolldown/binding-android-arm64@1.0.0-rc.9': optional: true - '@rolldown/binding-darwin-arm64@1.0.0-rc.7': + '@rolldown/binding-darwin-arm64@1.0.0-rc.9': optional: true - '@rolldown/binding-darwin-x64@1.0.0-rc.7': + '@rolldown/binding-darwin-x64@1.0.0-rc.9': optional: true - '@rolldown/binding-freebsd-x64@1.0.0-rc.7': + '@rolldown/binding-freebsd-x64@1.0.0-rc.9': optional: true - '@rolldown/binding-linux-arm-gnueabihf@1.0.0-rc.7': + '@rolldown/binding-linux-arm-gnueabihf@1.0.0-rc.9': optional: true - '@rolldown/binding-linux-arm64-gnu@1.0.0-rc.7': + 
'@rolldown/binding-linux-arm64-gnu@1.0.0-rc.9': optional: true - '@rolldown/binding-linux-arm64-musl@1.0.0-rc.7': + '@rolldown/binding-linux-arm64-musl@1.0.0-rc.9': optional: true - '@rolldown/binding-linux-ppc64-gnu@1.0.0-rc.7': + '@rolldown/binding-linux-ppc64-gnu@1.0.0-rc.9': optional: true - '@rolldown/binding-linux-s390x-gnu@1.0.0-rc.7': + '@rolldown/binding-linux-s390x-gnu@1.0.0-rc.9': optional: true - '@rolldown/binding-linux-x64-gnu@1.0.0-rc.7': + '@rolldown/binding-linux-x64-gnu@1.0.0-rc.9': optional: true - '@rolldown/binding-linux-x64-musl@1.0.0-rc.7': + '@rolldown/binding-linux-x64-musl@1.0.0-rc.9': optional: true - '@rolldown/binding-openharmony-arm64@1.0.0-rc.7': + '@rolldown/binding-openharmony-arm64@1.0.0-rc.9': optional: true - '@rolldown/binding-wasm32-wasi@1.0.0-rc.7': + '@rolldown/binding-wasm32-wasi@1.0.0-rc.9': dependencies: '@napi-rs/wasm-runtime': 1.1.1 optional: true - '@rolldown/binding-win32-arm64-msvc@1.0.0-rc.7': + '@rolldown/binding-win32-arm64-msvc@1.0.0-rc.9': optional: true - '@rolldown/binding-win32-x64-msvc@1.0.0-rc.7': + '@rolldown/binding-win32-x64-msvc@1.0.0-rc.9': optional: true - '@rolldown/pluginutils@1.0.0-rc.7': {} + '@rolldown/pluginutils@1.0.0-rc.9': {} '@rollup/rollup-android-arm-eabi@4.35.0': optional: true @@ -18339,8 +18504,8 @@ snapshots: '@typescript-eslint/parser': 6.21.0(eslint@8.57.1)(typescript@5.8.2) eslint: 8.57.1 eslint-import-resolver-node: 0.3.9 - eslint-import-resolver-typescript: 3.9.0(eslint-plugin-import@2.32.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.2))(eslint@8.57.1))(eslint@8.57.1) - eslint-plugin-import: 2.32.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.2))(eslint-import-resolver-typescript@3.9.0(eslint-plugin-import@2.32.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.2))(eslint@8.57.1))(eslint@8.57.1))(eslint@8.57.1) + eslint-import-resolver-typescript: 3.9.0(eslint-plugin-import@2.32.0)(eslint@8.57.1) + eslint-plugin-import: 
2.32.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.2))(eslint-import-resolver-typescript@3.9.0)(eslint@8.57.1) eslint-plugin-jsx-a11y: 6.10.2(eslint@8.57.1) eslint-plugin-react: 7.37.4(eslint@8.57.1) eslint-plugin-react-hooks: 5.2.0(eslint@8.57.1) @@ -18359,7 +18524,7 @@ snapshots: transitivePeerDependencies: - supports-color - eslint-import-resolver-typescript@3.9.0(eslint-plugin-import@2.32.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.2))(eslint@8.57.1))(eslint@8.57.1): + eslint-import-resolver-typescript@3.9.0(eslint-plugin-import@2.32.0)(eslint@8.57.1): dependencies: '@nolyfill/is-core-module': 1.0.39 debug: 4.4.3 @@ -18370,22 +18535,22 @@ snapshots: stable-hash: 0.0.5 tinyglobby: 0.2.15 optionalDependencies: - eslint-plugin-import: 2.32.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.2))(eslint-import-resolver-typescript@3.9.0(eslint-plugin-import@2.32.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.2))(eslint@8.57.1))(eslint@8.57.1))(eslint@8.57.1) + eslint-plugin-import: 2.32.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.2))(eslint-import-resolver-typescript@3.9.0)(eslint@8.57.1) transitivePeerDependencies: - supports-color - eslint-module-utils@2.12.1(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.2))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.9.0(eslint-plugin-import@2.32.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.2))(eslint@8.57.1))(eslint@8.57.1))(eslint@8.57.1): + eslint-module-utils@2.12.1(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.2))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.9.0)(eslint@8.57.1): dependencies: debug: 3.2.7 optionalDependencies: '@typescript-eslint/parser': 6.21.0(eslint@8.57.1)(typescript@5.8.2) eslint: 8.57.1 eslint-import-resolver-node: 0.3.9 - eslint-import-resolver-typescript: 
3.9.0(eslint-plugin-import@2.32.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.2))(eslint@8.57.1))(eslint@8.57.1) + eslint-import-resolver-typescript: 3.9.0(eslint-plugin-import@2.32.0)(eslint@8.57.1) transitivePeerDependencies: - supports-color - eslint-plugin-import@2.32.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.2))(eslint-import-resolver-typescript@3.9.0(eslint-plugin-import@2.32.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.2))(eslint@8.57.1))(eslint@8.57.1))(eslint@8.57.1): + eslint-plugin-import@2.32.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.2))(eslint-import-resolver-typescript@3.9.0)(eslint@8.57.1): dependencies: '@rtsao/scc': 1.1.0 array-includes: 3.1.9 @@ -18396,7 +18561,7 @@ snapshots: doctrine: 2.1.0 eslint: 8.57.1 eslint-import-resolver-node: 0.3.9 - eslint-module-utils: 2.12.1(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.2))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.9.0(eslint-plugin-import@2.32.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.2))(eslint@8.57.1))(eslint@8.57.1))(eslint@8.57.1) + eslint-module-utils: 2.12.1(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.2))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.9.0)(eslint@8.57.1) hasown: 2.0.2 is-core-module: 2.16.1 is-glob: 4.0.3 @@ -19325,7 +19490,7 @@ snapshots: hookable@5.5.3: {} - hookable@6.0.1: {} + hookable@6.1.0: {} html-escaper@2.0.2: {} @@ -21047,7 +21212,7 @@ snapshots: transitivePeerDependencies: - supports-color - next-i18next@15.4.2(i18next@23.16.8)(next@16.1.6(@babel/core@7.26.10)(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react-i18next@14.1.2(i18next@23.16.8)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1): + 
next-i18next@15.4.2(i18next@23.16.8)(next@16.1.6(@opentelemetry/api@1.9.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(sass@1.85.1))(react-i18next@14.1.2(i18next@23.16.8)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1): dependencies: '@babel/runtime': 7.26.10 '@types/hoist-non-react-statics': 3.3.6 @@ -21304,7 +21469,7 @@ snapshots: '@opentelemetry/exporter-logs-otlp-grpc': 0.202.0(@opentelemetry/api@1.9.0) '@opentelemetry/exporter-logs-otlp-http': 0.202.0(@opentelemetry/api@1.9.0) '@opentelemetry/exporter-logs-otlp-proto': 0.202.0(@opentelemetry/api@1.9.0) - '@opentelemetry/resources': 2.5.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 2.6.0(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-logs': 0.202.0(@opentelemetry/api@1.9.0) transitivePeerDependencies: - '@opentelemetry/api' @@ -22401,7 +22566,7 @@ snapshots: robust-predicates@3.0.2: {} - rolldown-plugin-dts@0.22.4(rolldown@1.0.0-rc.7)(typescript@5.9.3): + rolldown-plugin-dts@0.22.5(rolldown@1.0.0-rc.9)(typescript@5.9.3): dependencies: '@babel/generator': 8.0.0-rc.2 '@babel/helper-validator-identifier': 8.0.0-rc.2 @@ -22412,32 +22577,32 @@ snapshots: dts-resolver: 2.1.3 get-tsconfig: 4.13.6 obug: 2.1.1 - rolldown: 1.0.0-rc.7 + rolldown: 1.0.0-rc.9 optionalDependencies: typescript: 5.9.3 transitivePeerDependencies: - oxc-resolver - rolldown@1.0.0-rc.7: + rolldown@1.0.0-rc.9: dependencies: '@oxc-project/types': 0.115.0 - '@rolldown/pluginutils': 1.0.0-rc.7 + '@rolldown/pluginutils': 1.0.0-rc.9 optionalDependencies: - '@rolldown/binding-android-arm64': 1.0.0-rc.7 - '@rolldown/binding-darwin-arm64': 1.0.0-rc.7 - '@rolldown/binding-darwin-x64': 1.0.0-rc.7 - '@rolldown/binding-freebsd-x64': 1.0.0-rc.7 - '@rolldown/binding-linux-arm-gnueabihf': 1.0.0-rc.7 - '@rolldown/binding-linux-arm64-gnu': 1.0.0-rc.7 - '@rolldown/binding-linux-arm64-musl': 1.0.0-rc.7 - '@rolldown/binding-linux-ppc64-gnu': 1.0.0-rc.7 - '@rolldown/binding-linux-s390x-gnu': 1.0.0-rc.7 - '@rolldown/binding-linux-x64-gnu': 
1.0.0-rc.7 - '@rolldown/binding-linux-x64-musl': 1.0.0-rc.7 - '@rolldown/binding-openharmony-arm64': 1.0.0-rc.7 - '@rolldown/binding-wasm32-wasi': 1.0.0-rc.7 - '@rolldown/binding-win32-arm64-msvc': 1.0.0-rc.7 - '@rolldown/binding-win32-x64-msvc': 1.0.0-rc.7 + '@rolldown/binding-android-arm64': 1.0.0-rc.9 + '@rolldown/binding-darwin-arm64': 1.0.0-rc.9 + '@rolldown/binding-darwin-x64': 1.0.0-rc.9 + '@rolldown/binding-freebsd-x64': 1.0.0-rc.9 + '@rolldown/binding-linux-arm-gnueabihf': 1.0.0-rc.9 + '@rolldown/binding-linux-arm64-gnu': 1.0.0-rc.9 + '@rolldown/binding-linux-arm64-musl': 1.0.0-rc.9 + '@rolldown/binding-linux-ppc64-gnu': 1.0.0-rc.9 + '@rolldown/binding-linux-s390x-gnu': 1.0.0-rc.9 + '@rolldown/binding-linux-x64-gnu': 1.0.0-rc.9 + '@rolldown/binding-linux-x64-musl': 1.0.0-rc.9 + '@rolldown/binding-openharmony-arm64': 1.0.0-rc.9 + '@rolldown/binding-wasm32-wasi': 1.0.0-rc.9 + '@rolldown/binding-win32-arm64-msvc': 1.0.0-rc.9 + '@rolldown/binding-win32-x64-msvc': 1.0.0-rc.9 rollup@4.35.0: dependencies: @@ -23155,6 +23320,8 @@ snapshots: tinyexec@1.0.2: {} + tinyexec@1.0.4: {} + tinyglobby@0.2.15: dependencies: fdir: 6.5.0(picomatch@4.0.3) @@ -23233,24 +23400,24 @@ snapshots: minimist: 1.2.8 strip-bom: 3.0.0 - tsdown@0.21.0(typescript@5.9.3): + tsdown@0.21.4(typescript@5.9.3): dependencies: ansis: 4.2.0 cac: 7.0.0 defu: 6.1.4 empathic: 2.0.0 - hookable: 6.0.1 + hookable: 6.1.0 import-without-cache: 0.2.5 obug: 2.1.1 picomatch: 4.0.3 - rolldown: 1.0.0-rc.7 - rolldown-plugin-dts: 0.22.4(rolldown@1.0.0-rc.7)(typescript@5.9.3) + rolldown: 1.0.0-rc.9 + rolldown-plugin-dts: 0.22.5(rolldown@1.0.0-rc.9)(typescript@5.9.3) semver: 7.7.4 - tinyexec: 1.0.2 + tinyexec: 1.0.4 tinyglobby: 0.2.15 tree-kill: 1.2.2 unconfig-core: 7.5.0 - unrun: 0.2.30 + unrun: 0.2.32 optionalDependencies: typescript: 5.9.3 transitivePeerDependencies: @@ -23504,9 +23671,9 @@ snapshots: unpipe@1.0.0: {} - unrun@0.2.30: + unrun@0.2.32: dependencies: - rolldown: 1.0.0-rc.7 + rolldown: 1.0.0-rc.9 
update-browserslist-db@1.1.3(browserslist@4.24.4): dependencies: diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index 8f8c6ba1f9..edd70f7cf8 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ -22,6 +22,7 @@ catalog: '@modelcontextprotocol/sdk': ^1 '@fastgpt-sdk/storage': 0.6.15 '@fastgpt-sdk/logger': 0.1.2 + '@fastgpt-sdk/otel': 0.1.0 '@types/lodash': ^4 '@types/react': ^18 '@types/react-dom': ^18 @@ -44,6 +45,20 @@ catalog: react: ^18 react-dom: ^18 react-i18next: 14.1.2 - tsdown: ^0.21.0 + tsdown: 0.21.4 typescript: ^5.9.3 zod: ^4 + +onlyBuiltDependencies: + - '@parcel/watcher' + - bufferutil + - canvas + - core-js + - esbuild + - mongodb-memory-server + - msgpackr-extract + - protobufjs + - puppeteer + - sharp + - utf-8-validate + - vue-demi diff --git a/projects/app/.env.template b/projects/app/.env.template index f409d8b71d..f3cc229721 100644 --- a/projects/app/.env.template +++ b/projects/app/.env.template @@ -50,12 +50,22 @@ HELPER_BOT_MODEL=qwen-max # ==================== 日志配置 ==================== # 日志等级: trace | debug | info | warning | error | fatal LOG_ENABLE_CONSOLE=true -LOG_CONSOLE_LEVEL=info -LOG_ENABLE_OTEL=false +LOG_CONSOLE_LEVEL=debug +LOG_ENABLE_OTEL=true LOG_OTEL_LEVEL=info LOG_OTEL_SERVICE_NAME=fastgpt-client LOG_OTEL_URL=http://localhost:4318/v1/logs +# 指标 +METRICS_ENABLE_OTEL=true +METRICS_OTEL_URL=http://localhost:4318/v1/metrics +METRICS_OTEL_SERVICE_NAME=fastgpt-client + +# 追踪 +TRACING_ENABLE_OTEL=true +TRACING_OTEL_URL=http://localhost:4318/v1/traces +TRACING_OTEL_SERVICE_NAME=fastgpt-client + # ==================== 对象存储 ==================== # 存储供应商;如果是 Sealos 的对象存储请填 aws-s3 STORAGE_VENDOR=minio diff --git a/projects/app/Dockerfile b/projects/app/Dockerfile index bb8c4ccd45..3252782309 100644 --- a/projects/app/Dockerfile +++ b/projects/app/Dockerfile @@ -45,6 +45,15 @@ ENV NODE_OPTIONS="--max-old-space-size=4096" ENV NEXT_PUBLIC_BASE_URL=$base_url RUN pnpm --filter=app build +# Remove build-time-only packages from 
standalone output before copying to runner. +# These are traced into standalone by mistake (rspack bindings, gnu platform binaries, etc.) +RUN rm -rf projects/app/.next/standalone/node_modules/.pnpm/@next+rspack-binding-*/ \ + projects/app/.next/standalone/node_modules/.pnpm/@rspack+binding-*/ \ + projects/app/.next/standalone/node_modules/.pnpm/next-rspack*/ \ + projects/app/.next/standalone/node_modules/.pnpm/typescript@*/ \ + projects/app/.next/standalone/node_modules/.pnpm/*-linux-x64-gnu@*/ \ + projects/app/.next/standalone/node_modules/.pnpm/@img+sharp-libvips-linux-x64@*/ + # --------- runner ----------- FROM node:20.14.0-alpine AS runner WORKDIR /app @@ -74,18 +83,13 @@ COPY --from=builder --chown=nextjs:nodejs /app/projects/app/worker /app/projects COPY --from=maindeps /app/node_modules/tiktoken ./node_modules/tiktoken RUN rm -rf ./node_modules/tiktoken/encoders COPY --from=maindeps /app/node_modules/@zilliz/milvus2-sdk-node ./node_modules/@zilliz/milvus2-sdk-node + # copy package.json to version file COPY --from=builder /app/projects/app/package.json ./package.json -# copy config -COPY ./projects/app/data/config.json /app/data/config.json -# copy test.mp3 -COPY ./projects/app/data/test.mp3 /app/data/test.mp3 -# copy GeoLite2-City.mmdb -COPY ./projects/app/data/GeoLite2-City.mmdb /app/data/GeoLite2-City.mmdb - -RUN chown -R nextjs:nodejs /app/data - -# Add tmp directory permission control +# copy config and data files (use --chown to avoid extra layer from chown) +COPY --chown=nextjs:nodejs ./projects/app/data/config.json /app/data/config.json +COPY --chown=nextjs:nodejs ./projects/app/data/test.mp3 /app/data/test.mp3 +COPY --chown=nextjs:nodejs ./projects/app/data/GeoLite2-City.mmdb /app/data/GeoLite2-City.mmdb ENV NODE_ENV=production ENV NEXT_TELEMETRY_DISABLED=1 diff --git a/projects/app/next.config.ts b/projects/app/next.config.ts index 07008e0318..6ab90976c3 100644 --- a/projects/app/next.config.ts +++ b/projects/app/next.config.ts @@ -62,6 +62,10 @@ 
const nextConfig: NextConfig = { { module: /bullmq[\\/]dist[\\/](cjs|esm)[\\/]classes[\\/]child-processor\.js$/, message: /Critical dependency: the request of a dependency is an expression/ + }, + { + module: /@fastgpt-sdk[\\/]sandbox-adapter[\\/]/, + message: /Critical dependency/ } ]; @@ -96,16 +100,14 @@ const nextConfig: NextConfig = { } if (isServer) { - (config.externals as string[]).push('@node-rs/jieba'); config.externals.push({ - '@e2b/code-interpreter': 'commonjs @e2b/code-interpreter', - e2b: 'commonjs e2b' + '@node-rs/jieba': '@node-rs/jieba' }); } config.experiments = { - asyncWebAssembly: true, - layers: true + ...config.experiments, + asyncWebAssembly: true }; if (isDev && !isServer) { @@ -131,15 +133,14 @@ const nextConfig: NextConfig = { return config; }, - transpilePackages: ['@modelcontextprotocol/sdk', 'ahooks', '@fastgpt-sdk/sandbox-adapter'], + transpilePackages: ['@modelcontextprotocol/sdk', 'ahooks'], serverExternalPackages: [ 'mongoose', 'pg', 'bullmq', '@zilliz/milvus2-sdk-node', 'tiktoken', - '@opentelemetry/api-logs', - 'chalk' + '@opentelemetry/api-logs' ], // 优化大库的 barrel exports tree-shaking experimental: { @@ -151,14 +152,37 @@ const nextConfig: NextConfig = { 'ahooks', 'framer-motion', '@emotion/react', - '@emotion/styled' + '@emotion/styled', + 'react-syntax-highlighter', + 'recharts', + '@tanstack/react-query', + 'react-hook-form', + 'react-markdown' ], // 按页面拆分 CSS chunk,减少首屏 CSS 体积 cssChunking: 'strict', // 减少内存占用 memoryBasedWorkersCount: true }, - outputFileTracingRoot: path.join(__dirname, '../../') + outputFileTracingRoot: path.join(__dirname, '../../'), + // Exclude build-time-only packages from standalone output file tracing + outputFileTracingExcludes: { + '*': [ + // Rspack bindings - only used in dev, not needed at runtime + 'node_modules/@next/rspack-binding-*/**', + 'node_modules/@rspack/binding-*/**', + 'node_modules/next-rspack/**', + // GNU platform binaries - Alpine uses musl only + 
'node_modules/**/*-linux-x64-gnu*/**', + // typescript - build-time only + 'node_modules/typescript/**', + // sharp libvips GNU variant (keep musl) + 'node_modules/@img/sharp-libvips-linux-x64/**', + // bundle-analyzer - build-time only + 'node_modules/@next/bundle-analyzer/**', + 'node_modules/webpack-bundle-analyzer/**' + ] + } }; const configWithPluginsExceptWithRspack = withBundleAnalyzer(nextConfig); diff --git a/projects/app/package.json b/projects/app/package.json index 423b7e2ccc..07c1fcbfc6 100644 --- a/projects/app/package.json +++ b/projects/app/package.json @@ -1,6 +1,6 @@ { "name": "app", - "version": "4.14.8.4", + "version": "4.14.9", "private": false, "scripts": { "dev": "NODE_OPTIONS='--max-old-space-size=8192' npm run build:workers && next dev", diff --git a/projects/app/src/components/core/chat/ChatContainer/ChatBox/components/ChatItem.tsx b/projects/app/src/components/core/chat/ChatContainer/ChatBox/components/ChatItem.tsx index ffb8937473..bd4d5912d7 100644 --- a/projects/app/src/components/core/chat/ChatContainer/ChatBox/components/ChatItem.tsx +++ b/projects/app/src/components/core/chat/ChatContainer/ChatBox/components/ChatItem.tsx @@ -80,7 +80,12 @@ const HumanContentCard = React.memo( {files.length > 0 && } {text && ( - + {text} )} diff --git a/projects/app/src/components/core/chat/HelperBot/components/HumanItem.tsx b/projects/app/src/components/core/chat/HelperBot/components/HumanItem.tsx index 8390507b3a..acd3745f0a 100644 --- a/projects/app/src/components/core/chat/HelperBot/components/HumanItem.tsx +++ b/projects/app/src/components/core/chat/HelperBot/components/HumanItem.tsx @@ -46,7 +46,11 @@ const HumanItem = ({ chat }: { chat: UserChatItemType }) => { > {files.length > 0 && } - {text && {text}} + {text && ( + + {text} + + )} diff --git a/projects/app/src/components/core/chat/HelperBot/context.tsx b/projects/app/src/components/core/chat/HelperBot/context.tsx index f1839071da..29e4497ddd 100644 --- 
a/projects/app/src/components/core/chat/HelperBot/context.tsx +++ b/projects/app/src/components/core/chat/HelperBot/context.tsx @@ -28,7 +28,8 @@ export const HelperBotContext = createContext({ taskObject: '', selectedTools: [], selectedDatasets: [], - fileUpload: false + fileUpload: false, + enableSandbox: false }, onApply: function (e): void { throw new Error('Function not implemented.'); diff --git a/projects/app/src/instrumentation.ts b/projects/app/src/instrumentation.ts index 7d26c97728..33ca6527e3 100644 --- a/projects/app/src/instrumentation.ts +++ b/projects/app/src/instrumentation.ts @@ -28,6 +28,8 @@ export async function register() { { initGeo }, { instrumentationCheck }, { getErrText }, + { configureMetrics }, + { configureTracing }, { configureLogger, getLogger, LogCategories }, { InitialErrorEnum } ] = await Promise.all([ @@ -49,10 +51,14 @@ export async function register() { import('@fastgpt/service/common/geo'), import('@/service/common/system/health'), import('@fastgpt/global/common/error/utils'), + import('@fastgpt/service/common/metrics'), + import('@fastgpt/service/common/tracing'), import('@fastgpt/service/common/logger'), import('@fastgpt/service/common/system/constants') ]); + await configureMetrics(); + await configureTracing(); await configureLogger(); const logger = getLogger(LogCategories.SYSTEM); logger.info('Starting system initialization...'); diff --git a/projects/app/src/pageComponents/app/detail/Edit/ChatAgent/ChatTest.tsx b/projects/app/src/pageComponents/app/detail/Edit/ChatAgent/ChatTest.tsx index d5d79a335b..88abb461fe 100644 --- a/projects/app/src/pageComponents/app/detail/Edit/ChatAgent/ChatTest.tsx +++ b/projects/app/src/pageComponents/app/detail/Edit/ChatAgent/ChatTest.tsx @@ -78,6 +78,7 @@ const ChatTest = ({ appForm, setAppForm, setRenderEdit, form2WorkflowFn }: Props selectedTools: appForm.selectedTools.map((tool) => tool.id), selectedDatasets: appForm.dataset.datasets.map((dataset) => dataset.datasetId), fileUpload: 
appForm.chatConfig.fileSelectConfig?.canSelectFile || false, + enableSandbox: appForm.aiSettings.useAgentSandbox || false, modelConfig: { model: appForm.aiSettings.model, temperature: appForm.aiSettings.temperature, @@ -153,6 +154,7 @@ const ChatTest = ({ appForm, setAppForm, setRenderEdit, form2WorkflowFn }: Props metadata={topAgentMetadata} onApply={async (formData) => { const fileUploadEnabled = !!formData.fileUploadEnabled; + const enableSandboxEnabled = !!formData.enableSandboxEnabled; // Filter internal tools const filteredToolIds = (formData.tools || []).filter( @@ -178,7 +180,8 @@ const ChatTest = ({ appForm, setAppForm, setRenderEdit, form2WorkflowFn }: Props : prev.dataset, aiSettings: { ...prev.aiSettings, - systemPrompt: formData.systemPrompt || prev.aiSettings.systemPrompt + systemPrompt: formData.systemPrompt || prev.aiSettings.systemPrompt, + useAgentSandbox: enableSandboxEnabled }, chatConfig: { ...prev.chatConfig, diff --git a/projects/app/src/pageComponents/app/detail/Edit/ChatAgent/EditForm.tsx b/projects/app/src/pageComponents/app/detail/Edit/ChatAgent/EditForm.tsx index ffdcec9d3e..7811bb7988 100644 --- a/projects/app/src/pageComponents/app/detail/Edit/ChatAgent/EditForm.tsx +++ b/projects/app/src/pageComponents/app/detail/Edit/ChatAgent/EditForm.tsx @@ -99,7 +99,8 @@ const EditForm = ({ appForm.chatConfig.fileSelectConfig?.canSelectAudio || appForm.chatConfig.fileSelectConfig?.canSelectCustomFileExtension ), - hasSelectedDataset: (appForm.dataset.datasets?.length || 0) > 0 + hasSelectedDataset: (appForm.dataset.datasets?.length || 0) > 0, + useAgentSandbox: !!appForm.aiSettings.useAgentSandbox }); const { diff --git a/projects/app/src/pageComponents/app/detail/Edit/ChatAgent/hooks/useSkillManager.tsx b/projects/app/src/pageComponents/app/detail/Edit/ChatAgent/hooks/useSkillManager.tsx index 7a1ceee3f7..f1244e9380 100644 --- a/projects/app/src/pageComponents/app/detail/Edit/ChatAgent/hooks/useSkillManager.tsx +++ 
b/projects/app/src/pageComponents/app/detail/Edit/ChatAgent/hooks/useSkillManager.tsx @@ -49,13 +49,15 @@ export const useSkillManager = ({ onUpdateOrAddTool, onDeleteTool, canUploadFile, - hasSelectedDataset + hasSelectedDataset, + useAgentSandbox }: { selectedTools: SelectedToolItemType[]; onDeleteTool: (id: string) => void; onUpdateOrAddTool: (tool: SelectedToolItemType) => void; canUploadFile: boolean; hasSelectedDataset: boolean; + useAgentSandbox: boolean; }) => { const { t, i18n } = useTranslation(); const { toast } = useToast(); @@ -109,6 +111,17 @@ export const useSkillManager = ({ }); } + const sandboxToolInfo = systemSubInfo[SubAppIds.sandboxTool]; + if (sandboxToolInfo) { + apiTools.unshift({ + id: SubAppIds.sandboxTool, + label: parseI18nString(sandboxToolInfo.name, i18n.language), + icon: sandboxToolInfo.avatar, + description: sandboxToolInfo.toolDescription, + canClick: true + }); + } + return apiTools; }, { @@ -324,8 +337,25 @@ export const useSkillManager = ({ }); } + // Merge sandbox tool + const sandboxToolInfo = systemSubInfo[SubAppIds.sandboxTool]; + if (sandboxToolInfo) { + tools.push({ + id: SubAppIds.sandboxTool, + pluginId: SubAppIds.sandboxTool, + name: parseI18nString(sandboxToolInfo.name, i18n.language), + avatar: sandboxToolInfo.avatar, + intro: sandboxToolInfo.toolDescription, + flowNodeType: FlowNodeTypeEnum.tool, + templateType: FlowNodeTemplateTypeEnum.tools, + inputs: [], + outputs: [], + configStatus: useAgentSandbox ? 
'noConfig' : 'invalid' + }); + } + return tools; - }, [selectedTools, canUploadFile, hasSelectedDataset, i18n.language]); + }, [selectedTools, canUploadFile, hasSelectedDataset, useAgentSandbox, i18n.language]); const [configTool, setConfigTool] = useState(); const onClickSkill = useCallback( diff --git a/projects/app/src/pageComponents/app/detail/WorkflowComponents/Flow/nodes/render/NodeCard.tsx b/projects/app/src/pageComponents/app/detail/WorkflowComponents/Flow/nodes/render/NodeCard.tsx index f5251161de..c960deaed2 100644 --- a/projects/app/src/pageComponents/app/detail/WorkflowComponents/Flow/nodes/render/NodeCard.tsx +++ b/projects/app/src/pageComponents/app/detail/WorkflowComponents/Flow/nodes/render/NodeCard.tsx @@ -223,12 +223,10 @@ const NodeCard = (props: Props) => { // 1. MCP tool, HTTP tool set and system tool set do not have version if ( isAppNode && - ( - node.toolConfig?.mcpToolSet || + (node.toolConfig?.mcpToolSet || node.toolConfig?.mcpTool || node?.toolConfig?.httpToolSet || - node?.toolConfig?.systemToolSet - ) + node?.toolConfig?.systemToolSet) ) return false; // 2. 
Team app/System commercial plugin diff --git a/sdk/otel/README.md b/sdk/otel/README.md new file mode 100644 index 0000000000..53f35bc6b8 --- /dev/null +++ b/sdk/otel/README.md @@ -0,0 +1,86 @@ +# @fastgpt-sdk/otel + +FastGPT 的统一 OpenTelemetry / observability SDK。 + +这个包的目标是作为未来的迁移目标,把现有的: + +- `@fastgpt-sdk/logger` +- `@fastgpt-sdk/metrics` +- tracing 能力 + +收拢到一个统一入口里,但目前不强制迁移现有代码。 + +它现在是一个自包含包: + +- 内部自带 logger 实现 +- 内部自带 metrics 实现 +- 内部自带 tracing 实现 +- 不依赖 `@fastgpt-sdk/logger` 或 `@fastgpt-sdk/metrics` + +同时支持两种使用方式: + +- 统一入口:`@fastgpt-sdk/otel` +- 渐进迁移入口:`@fastgpt-sdk/otel/logger`、`@fastgpt-sdk/otel/metrics`、`@fastgpt-sdk/otel/tracing` + +## 包含内容 + +- 内置 logger 能力 +- 内置 metrics 能力 +- 内置通用 tracing 能力 +- 提供统一的 `configureOtel()` / `configureOtelFromEnv()` 入口 + +## 快速开始 + +```ts +import { + configureOtelFromEnv, + getLogger, + getMeter, + getTracer +} from '@fastgpt-sdk/otel'; + +await configureOtelFromEnv({ + defaultServiceName: 'fastgpt-client' +}); + +const logger = getLogger(['system']); +const meter = getMeter('fastgpt-client'); +const tracer = getTracer('fastgpt-client'); +``` + +也可以渐进迁移: + +```ts +import { configureLoggerFromEnv, getLogger } from '@fastgpt-sdk/otel/logger'; +import { configureMetricsFromEnv, getMeter } from '@fastgpt-sdk/otel/metrics'; +import { configureTracingFromEnv, getTracer } from '@fastgpt-sdk/otel/tracing'; +``` + +## 迁移思路 + +未来可以分阶段迁移: + +1. 先只把初始化入口从多个 SDK 收拢到 `@fastgpt-sdk/otel` +2. 再逐步把 import 从 `logger/metrics` 改成 `otel` +3. 
最后按业务需要补 traces + +## tracing 环境变量 + +- `TRACING_ENABLE_OTEL` +- `TRACING_OTEL_SERVICE_NAME` +- `TRACING_OTEL_URL` +- `TRACING_OTEL_SAMPLE_RATIO` + +同时兼容标准 OTEL fallback: + +- `OTEL_SERVICE_NAME` +- `OTEL_EXPORTER_OTLP_TRACES_ENDPOINT` +- `OTEL_EXPORTER_OTLP_ENDPOINT` +- `OTEL_TRACES_EXPORTER` +- `OTEL_TRACES_SAMPLER` +- `OTEL_TRACES_SAMPLER_ARG` + +## 说明 + +- 这个包当前是“整理好的统一入口”,不是“已经迁移完成的替换方案”。 +- 现有 `logger` 与 `metrics` 包仍然可继续独立使用,后续可以逐步迁移到这个包。 diff --git a/sdk/otel/package.json b/sdk/otel/package.json new file mode 100644 index 0000000000..e53b58003b --- /dev/null +++ b/sdk/otel/package.json @@ -0,0 +1,81 @@ +{ + "name": "@fastgpt-sdk/otel", + "private": false, + "version": "0.1.0", + "description": "FastGPT SDK for OpenTelemetry observability", + "type": "module", + "main": "./dist/index.mjs", + "types": "./dist/index.d.mts", + "exports": { + ".": { + "import": "./dist/index.mjs", + "types": "./dist/index.d.mts" + }, + "./logger": { + "import": "./dist/logger-entry.mjs", + "types": "./dist/logger-entry.d.mts" + }, + "./metrics": { + "import": "./dist/metrics-entry.mjs", + "types": "./dist/metrics-entry.d.mts" + }, + "./tracing": { + "import": "./dist/tracing-entry.mjs", + "types": "./dist/tracing-entry.d.mts" + } + }, + "files": [ + "dist" + ], + "scripts": { + "build": "tsdown", + "dev": "tsdown --watch", + "prepublishOnly": "pnpm build" + }, + "keywords": [ + "otel", + "opentelemetry", + "metrics", + "tracing", + "logging" + ], + "author": "FastGPT", + "repository": { + "type": "git", + "url": "https://github.com/labring/FastGPT.git", + "directory": "FastGPT/sdk/otel" + }, + "homepage": "https://github.com/labring/FastGPT", + "bugs": { + "url": "https://github.com/labring/FastGPT/issues" + }, + "publishConfig": { + "access": "public" + }, + "engines": { + "node": ">=20", + "pnpm": ">=9" + }, + "packageManager": "pnpm@9.15.9", + "license": "Apache-2.0", + "dependencies": { + "@logtape/logtape": "^2", + "@logtape/pretty": "^2", + "@opentelemetry/api": "^1.9.0", + 
"@opentelemetry/api-logs": "^0.203.0", + "@opentelemetry/exporter-logs-otlp-http": "^0.203.0", + "@opentelemetry/exporter-metrics-otlp-http": "^0.203.0", + "@opentelemetry/exporter-trace-otlp-http": "^0.203.0", + "@opentelemetry/resources": "^2.0.1", + "@opentelemetry/sdk-logs": "^0.203.0", + "@opentelemetry/sdk-metrics": "^2.0.1", + "@opentelemetry/sdk-trace-base": "^2.0.1", + "@opentelemetry/sdk-trace-node": "^2.0.1", + "@opentelemetry/semantic-conventions": "^1.39.0" + }, + "devDependencies": { + "@types/node": "catalog:", + "tsdown": "catalog:", + "typescript": "catalog:" + } +} diff --git a/sdk/otel/src/client.ts b/sdk/otel/src/client.ts new file mode 100644 index 0000000000..dc4bcfde6f --- /dev/null +++ b/sdk/otel/src/client.ts @@ -0,0 +1,21 @@ +import { configureLogger, disposeLogger, getLogger } from './logger'; +import type { LoggerConfigureOptions } from './logger'; +import { configureMetrics, disposeMetrics, getMeter } from './metrics'; +import type { MetricsConfigureOptions } from './metrics'; +import { configureTracing, disposeTracing, getCurrentSpanContext, getTracer } from './tracing'; +import type { TracingConfigureOptions } from './tracing'; +import type { OtelConfigureOptions } from './types'; + +export async function configureOtel(options: OtelConfigureOptions = {}) { + await Promise.all([ + configureLogger(options.logger ?? ({} satisfies LoggerConfigureOptions)), + configureMetrics(options.metrics ?? ({} satisfies MetricsConfigureOptions)), + configureTracing(options.tracing ?? 
({} satisfies TracingConfigureOptions)) + ]); +} + +export async function disposeOtel() { + await Promise.all([disposeLogger(), disposeMetrics(), disposeTracing()]); +} + +export { getCurrentSpanContext, getLogger, getMeter, getTracer }; diff --git a/sdk/otel/src/env-utils.ts b/sdk/otel/src/env-utils.ts new file mode 100644 index 0000000000..010714da2f --- /dev/null +++ b/sdk/otel/src/env-utils.ts @@ -0,0 +1,32 @@ +export type EnvValue = string | boolean | number | undefined; + +export function parseBooleanEnv(value: EnvValue, defaultValue: boolean) { + if (typeof value === 'boolean') return value; + if (typeof value === 'number') return value !== 0; + if (typeof value !== 'string' || !value) return defaultValue; + + const normalized = value.trim().toLowerCase(); + if (['1', 'true', 'yes', 'on'].includes(normalized)) return true; + if (['0', 'false', 'no', 'off'].includes(normalized)) return false; + + return defaultValue; +} + +export function parseNumberEnv(value: EnvValue, defaultValue: number) { + if (typeof value === 'number' && Number.isFinite(value)) return value; + if (typeof value !== 'string') return defaultValue; + + const parsed = Number(value); + return Number.isFinite(parsed) ? parsed : defaultValue; +} + +export function parsePositiveNumberEnv(value: EnvValue, defaultValue: number) { + const parsed = parseNumberEnv(value, defaultValue); + return parsed > 0 ? parsed : defaultValue; +} + +export function parseStringEnv(value: EnvValue): string | undefined { + if (typeof value !== 'string') return undefined; + const trimmed = value.trim(); + return trimmed.length > 0 ? 
trimmed : undefined; +} diff --git a/sdk/otel/src/env.ts b/sdk/otel/src/env.ts new file mode 100644 index 0000000000..236192c9e0 --- /dev/null +++ b/sdk/otel/src/env.ts @@ -0,0 +1,71 @@ +import { + configureLoggerFromEnv, + createLoggerOptionsFromEnv, + type LoggerConfigureFromEnvOptions, + type LoggerEnv +} from './logger'; +import { + configureMetricsFromEnv, + createMetricsOptionsFromEnv, + type MetricsConfigureFromEnvOptions, + type MetricsEnv +} from './metrics'; +import { configureTracingFromEnv, createTracingOptionsFromEnv } from './tracing'; +import type { TracingConfigureFromEnvOptions, TracingEnv } from './tracing'; +import type { OtelConfigureOptions } from './types'; + +type OtelEnv = LoggerEnv & MetricsEnv & TracingEnv; + +export type OtelConfigureFromEnvOptions = { + env?: OtelEnv; + defaultServiceName?: string; + logger?: Omit; + metrics?: Omit; + tracing?: Omit; +}; + +export function createOtelOptionsFromEnv( + options: OtelConfigureFromEnvOptions = {} +): OtelConfigureOptions { + const env = options.env ?? process.env; + + return { + logger: createLoggerOptionsFromEnv({ + env, + defaultServiceName: options.defaultServiceName, + ...options.logger + }), + metrics: createMetricsOptionsFromEnv({ + env, + defaultServiceName: options.defaultServiceName, + ...options.metrics + }), + tracing: createTracingOptionsFromEnv({ + env, + defaultServiceName: options.defaultServiceName, + ...options.tracing + }) + }; +} + +export async function configureOtelFromEnv(options: OtelConfigureFromEnvOptions = {}) { + const env = options.env ?? 
process.env; + + await Promise.all([ + configureLoggerFromEnv({ + env, + defaultServiceName: options.defaultServiceName, + ...options.logger + }), + configureMetricsFromEnv({ + env, + defaultServiceName: options.defaultServiceName, + ...options.metrics + }), + configureTracingFromEnv({ + env, + defaultServiceName: options.defaultServiceName, + ...options.tracing + }) + ]); +} diff --git a/sdk/otel/src/index.ts b/sdk/otel/src/index.ts new file mode 100644 index 0000000000..c9c107dbf5 --- /dev/null +++ b/sdk/otel/src/index.ts @@ -0,0 +1,7 @@ +export { configureOtel, disposeOtel } from './client'; +export { configureOtelFromEnv, createOtelOptionsFromEnv } from './env'; + +export * from './logger'; +export * from './metrics'; +export * from './tracing'; +export type * from './types'; diff --git a/sdk/otel/src/logger-entry.ts b/sdk/otel/src/logger-entry.ts new file mode 100644 index 0000000000..1ff09efd40 --- /dev/null +++ b/sdk/otel/src/logger-entry.ts @@ -0,0 +1 @@ +export * from './logger'; diff --git a/sdk/otel/src/logger/client.ts b/sdk/otel/src/logger/client.ts new file mode 100644 index 0000000000..07a4b413a1 --- /dev/null +++ b/sdk/otel/src/logger/client.ts @@ -0,0 +1,107 @@ +import { AsyncLocalStorage } from 'node:async_hooks'; +import { configure, dispose, getLogger as getLogtapeLogger } from '@logtape/logtape'; +import { createLoggers } from './loggers'; +import { createSinks } from './sinks'; +import type { LogCategory, LoggerConfigureOptions, LoggerContext } from './types'; + +let configured = false; +let configurePromise: Promise | null = null; +let defaultCategory: LogCategory = ['system']; + +export async function configureLogger(options: LoggerConfigureOptions = {}) { + if (configured) return; + if (configurePromise) return configurePromise; + + configurePromise = (async () => { + defaultCategory = options.defaultCategory ?? 
defaultCategory; + + const { sinks, composedSinks } = await createSinks({ + console: options.console, + otel: options.otel, + sensitiveProperties: options.sensitiveProperties + }); + + const loggers = options.loggers ?? createLoggers({ composedSinks }); + const contextLocalStorage = + options.contextLocalStorage ?? new AsyncLocalStorage(); + + await configure({ + contextLocalStorage, + loggers, + sinks + }); + + configured = true; + })(); + + try { + await configurePromise; + } catch (error) { + configurePromise = null; + throw error; + } +} + +export async function disposeLogger() { + if (configurePromise) { + try { + await configurePromise; + } catch { + configurePromise = null; + return; + } + } + + if (!configured) return; + + await dispose(); + + configured = false; + configurePromise = null; +} + +export function getLogger(category: LogCategory = defaultCategory) { + const logger = getLogtapeLogger(category); + + return new Proxy(logger, { + get(target, prop, receiver) { + const fn = Reflect.get(target, prop, receiver); + + if (typeof fn !== 'function') return fn; + + return (...args: unknown[]) => { + if (args.length === 0) return fn.call(target); + + const [firstArg, secondArg] = args; + + if (args.length === 1) { + return fn.call(target, firstArg); + } + + if (typeof firstArg === 'string') { + if ( + typeof secondArg === 'object' && + secondArg && + 'verbose' in secondArg && + typeof secondArg.verbose === 'boolean' && + !secondArg.verbose + ) { + const { verbose: _verbose, ...properties } = secondArg as Record & { + verbose?: boolean; + }; + + return fn.call(target, firstArg, properties); + } + + return fn.call(target, `${firstArg}: {*}`, secondArg); + } + + if (typeof firstArg === 'object') { + return fn.call(target, firstArg); + } + + return fn.apply(target, args); + }; + } + }); +} diff --git a/sdk/otel/src/logger/env.ts b/sdk/otel/src/logger/env.ts new file mode 100644 index 0000000000..5cf642e3b3 --- /dev/null +++ b/sdk/otel/src/logger/env.ts @@ -0,0 
+1,62 @@ +import type { LogLevel } from '@logtape/logtape'; +import { configureLogger } from './client'; +import { parseBooleanEnv, parseStringEnv } from '../env-utils'; +import type { LogCategory, LoggerConfigureOptions } from './types'; + +export type LoggerEnvValue = string | boolean | number | undefined; +export type LoggerEnv = Record; + +export type LoggerConfigureFromEnvOptions = { + env?: LoggerEnv; + defaultCategory?: LogCategory; + defaultServiceName?: string; + defaultLoggerName?: string; + defaultConsoleEnabled?: boolean; + defaultConsoleLevel?: LogLevel; + defaultOtelEnabled?: boolean; + defaultOtelLevel?: LogLevel; + defaultOtelUrl?: string; + sensitiveProperties?: readonly string[]; +}; + +const logLevels = new Set(['trace', 'debug', 'info', 'warning', 'error', 'fatal']); + +function parseLogLevel(value: LoggerEnvValue, defaultValue: LogLevel): LogLevel { + if (typeof value !== 'string') return defaultValue; + + return logLevels.has(value as LogLevel) ? (value as LogLevel) : defaultValue; +} + +export function createLoggerOptionsFromEnv( + options: LoggerConfigureFromEnvOptions = {} +): LoggerConfigureOptions { + const env = options.env ?? process.env; + const defaultServiceName = options.defaultServiceName ?? 'app'; + const serviceName = parseStringEnv(env.LOG_OTEL_SERVICE_NAME) ?? defaultServiceName; + const loggerName = + parseStringEnv(env.LOG_OTEL_LOGGER_NAME) ?? options.defaultLoggerName ?? serviceName; + + return { + defaultCategory: options.defaultCategory, + console: { + enabled: parseBooleanEnv(env.LOG_ENABLE_CONSOLE, options.defaultConsoleEnabled ?? true), + level: parseLogLevel(env.LOG_CONSOLE_LEVEL, options.defaultConsoleLevel ?? 'trace') + }, + otel: parseBooleanEnv(env.LOG_ENABLE_OTEL, options.defaultOtelEnabled ?? false) + ? { + serviceName, + loggerName, + url: + parseStringEnv(env.LOG_OTEL_URL) ?? + options.defaultOtelUrl ?? + 'http://localhost:4318/v1/logs', + level: parseLogLevel(env.LOG_OTEL_LEVEL, options.defaultOtelLevel ?? 
'info') + } + : false, + sensitiveProperties: options.sensitiveProperties + }; +} + +export async function configureLoggerFromEnv(options: LoggerConfigureFromEnvOptions = {}) { + return configureLogger(createLoggerOptionsFromEnv(options)); +} diff --git a/sdk/otel/src/logger/helpers.ts b/sdk/otel/src/logger/helpers.ts new file mode 100644 index 0000000000..07ba32a04e --- /dev/null +++ b/sdk/otel/src/logger/helpers.ts @@ -0,0 +1,20 @@ +import { SeverityNumber } from '@opentelemetry/api-logs'; + +export function mapLevelToSeverityNumber(level: string): number { + switch (level) { + case 'trace': + return SeverityNumber.TRACE; + case 'debug': + return SeverityNumber.DEBUG; + case 'info': + return SeverityNumber.INFO; + case 'warning': + return SeverityNumber.WARN; + case 'error': + return SeverityNumber.ERROR; + case 'fatal': + return SeverityNumber.FATAL; + default: + return SeverityNumber.UNSPECIFIED; + } +} diff --git a/sdk/otel/src/logger/index.ts b/sdk/otel/src/logger/index.ts new file mode 100644 index 0000000000..0c46bd995e --- /dev/null +++ b/sdk/otel/src/logger/index.ts @@ -0,0 +1,22 @@ +export { configureLogger, disposeLogger, getLogger } from './client'; +export { withContext, withCategoryPrefix } from '@logtape/logtape'; +export { getOpenTelemetrySink } from './otel'; +export type { + BodyFormatter, + ExceptionAttributeMode, + ObjectRenderer, + OpenTelemetrySink, + OpenTelemetrySinkOptions +} from './otel'; +export type { + ConsoleLoggerOptions, + LogCategory, + LoggerConfig, + LoggerConfigureOptions, + LoggerContext, + LoggerSinkId, + OtelLoggerOptions +} from './types'; + +export { configureLoggerFromEnv, createLoggerOptionsFromEnv } from './env'; +export type { LoggerConfigureFromEnvOptions, LoggerEnv } from './env'; diff --git a/sdk/otel/src/logger/loggers.ts b/sdk/otel/src/logger/loggers.ts new file mode 100644 index 0000000000..5932fcc8eb --- /dev/null +++ b/sdk/otel/src/logger/loggers.ts @@ -0,0 +1,29 @@ +import type { LogTapeConfig, LoggerSinkId } 
from './types'; + +type LoggerConfig = LogTapeConfig['loggers']; + +type CreateLoggersOptions = { + composedSinks: LoggerSinkId[]; +}; + +export function createLoggers({ composedSinks }: CreateLoggersOptions): LoggerConfig { + const metaSinks: LoggerSinkId[] = composedSinks.includes('console') ? ['console'] : composedSinks; + + return [ + { + category: [], + lowestLevel: 'trace', + sinks: composedSinks + }, + ...(metaSinks.length === 0 + ? [] + : [ + { + category: ['logtape', 'meta'], + lowestLevel: 'fatal' as const, + parentSinks: 'override' as const, + sinks: metaSinks + } + ]) + ]; +} diff --git a/sdk/otel/src/logger/otel.ts b/sdk/otel/src/logger/otel.ts new file mode 100644 index 0000000000..79ebbbc2e6 --- /dev/null +++ b/sdk/otel/src/logger/otel.ts @@ -0,0 +1,476 @@ +import { getLogger, type Logger, type LogRecord, type Sink } from '@logtape/logtape'; +import { diag, type DiagLogger, DiagLogLevel } from '@opentelemetry/api'; +import { + type AnyValue, + type Logger as OTLogger, + type LoggerProvider as LoggerProviderBase, + type LogRecord as OTLogRecord, + NOOP_LOGGER +} from '@opentelemetry/api-logs'; +import { OTLPLogExporter } from '@opentelemetry/exporter-logs-otlp-http'; +import type { Resource } from '@opentelemetry/resources'; +import { defaultResource, resourceFromAttributes } from '@opentelemetry/resources'; +import { LoggerProvider, SimpleLogRecordProcessor } from '@opentelemetry/sdk-logs'; +import { ATTR_SERVICE_NAME } from '@opentelemetry/semantic-conventions'; +import { inspect as nodeInspect } from 'util'; +import { mapLevelToSeverityNumber } from './helpers'; + +function getEnvironmentVariable(name: string): string | undefined { + return process.env[name]; +} + +type OtlpHttpExporterConfig = ConstructorParameters[0]; + +function hasOtlpEndpoint(config?: OtlpHttpExporterConfig): boolean { + if (config?.url) return true; + + const logsEndpoint = getEnvironmentVariable('OTEL_EXPORTER_OTLP_LOGS_ENDPOINT'); + if (logsEndpoint) return true; + + const 
endpoint = getEnvironmentVariable('OTEL_EXPORTER_OTLP_ENDPOINT'); + if (endpoint) return true; + + return false; +} + +type ILoggerProvider = LoggerProviderBase & { + shutdown?: () => Promise; +}; + +export type ObjectRenderer = 'json' | 'inspect'; + +type Message = (string | null | undefined)[]; + +export type BodyFormatter = (message: Message) => AnyValue; + +export type ExceptionAttributeMode = 'semconv' | 'raw' | false; + +interface OpenTelemetrySinkOptionsBase { + messageType?: 'string' | 'array' | BodyFormatter; + objectRenderer?: ObjectRenderer; + exceptionAttributes?: ExceptionAttributeMode; + diagnostics?: boolean; + loggerName?: string; +} + +export interface OpenTelemetrySinkProviderOptions extends OpenTelemetrySinkOptionsBase { + loggerProvider: ILoggerProvider; +} + +export interface OpenTelemetrySinkExporterOptions extends OpenTelemetrySinkOptionsBase { + loggerProvider?: undefined; + otlpExporterConfig?: OtlpHttpExporterConfig; + serviceName?: string; + additionalResource?: Resource; +} + +export type OpenTelemetrySinkOptions = + | OpenTelemetrySinkProviderOptions + | OpenTelemetrySinkExporterOptions; + +const noopLoggerProvider: ILoggerProvider = { + getLogger: () => NOOP_LOGGER +}; + +async function initializeLoggerProvider( + options: OpenTelemetrySinkExporterOptions +): Promise { + if (!hasOtlpEndpoint(options.otlpExporterConfig)) { + return noopLoggerProvider; + } + + const resource = defaultResource().merge( + resourceFromAttributes({ + [ATTR_SERVICE_NAME]: options.serviceName ?? getEnvironmentVariable('OTEL_SERVICE_NAME') + }).merge(options.additionalResource ?? 
null) + ); + + const otlpExporter = new OTLPLogExporter(options.otlpExporterConfig); + const loggerProvider = new LoggerProvider({ + resource, + processors: [new SimpleLogRecordProcessor(otlpExporter)] + }); + + return loggerProvider; +} + +function emitLogRecord( + logger: OTLogger, + record: LogRecord, + options: OpenTelemetrySinkOptions +): void { + const objectRenderer = options.objectRenderer ?? 'inspect'; + const exceptionMode = options.exceptionAttributes ?? 'semconv'; + const { category, level, message, timestamp, properties } = record; + const severityNumber = mapLevelToSeverityNumber(level); + const attributes = convertToAttributes(properties ?? {}, objectRenderer, exceptionMode); + + attributes['category'] = [...category]; + + logger.emit({ + severityNumber, + severityText: level, + body: + typeof options.messageType === 'function' + ? convertMessageToCustomBodyFormat( + message, + objectRenderer, + exceptionMode, + options.messageType + ) + : options.messageType === 'array' + ? convertMessageToArray(message, objectRenderer, exceptionMode) + : convertMessageToString(message, objectRenderer, exceptionMode), + attributes, + timestamp: new Date(timestamp) + } satisfies OTLogRecord); +} + +export interface OpenTelemetrySink extends Sink, AsyncDisposable { + readonly ready: Promise; +} + +function getOpenTelemetryLoggerName(options: OpenTelemetrySinkOptions): string { + const serviceName = 'serviceName' in options ? options.serviceName : undefined; + + return options.loggerName ?? serviceName ?? 
'app'; +} + +export function getOpenTelemetrySink(options: OpenTelemetrySinkOptions = {}): OpenTelemetrySink { + if (options.diagnostics) { + diag.setLogger(new DiagLoggerAdaptor(), DiagLogLevel.DEBUG); + } + + if (options.loggerProvider != null) { + const loggerProvider = options.loggerProvider; + const logger = loggerProvider.getLogger(getOpenTelemetryLoggerName(options)); + const shutdown = loggerProvider.shutdown?.bind(loggerProvider); + const sink: OpenTelemetrySink = Object.assign( + (record: LogRecord) => { + const { category } = record; + if (category[0] === 'logtape' && category[1] === 'meta' && category[2] === 'otel') { + return; + } + emitLogRecord(logger, record, options); + }, + { + ready: Promise.resolve(), + async [Symbol.asyncDispose](): Promise { + if (shutdown != null) await shutdown(); + } + } + ); + return sink; + } + + let loggerProvider: ILoggerProvider | null = null; + let logger: OTLogger | null = null; + let initPromise: Promise | null = null; + let initError: Error | null = null; + let pendingRecords: LogRecord[] = []; + + const sink: OpenTelemetrySink = Object.assign( + (record: LogRecord) => { + const { category } = record; + if (category[0] === 'logtape' && category[1] === 'meta' && category[2] === 'otel') { + return; + } + + if (logger != null) { + emitLogRecord(logger, record, options); + return; + } + + if (initError != null) { + return; + } + + pendingRecords.push(record); + + if (initPromise == null) { + initPromise = initializeLoggerProvider(options) + .then((provider) => { + loggerProvider = provider; + logger = provider.getLogger(getOpenTelemetryLoggerName(options)); + for (const pendingRecord of pendingRecords) { + emitLogRecord(logger, pendingRecord, options); + } + pendingRecords = []; + }) + .catch((error) => { + initError = error as Error; + pendingRecords = []; + // eslint-disable-next-line no-console + console.error('Failed to initialize OpenTelemetry logger:', error); + }); + } + }, + { + get ready(): Promise { + return 
initPromise ?? Promise.resolve(); + }, + async [Symbol.asyncDispose](): Promise { + if (initPromise != null) { + try { + await initPromise; + } catch { + return; + } + } + if (loggerProvider?.shutdown != null) { + await loggerProvider.shutdown(); + } + } + } + ); + + return sink; +} + +function convertValueToAnyValue( + value: unknown, + objectRenderer: ObjectRenderer, + exceptionMode: ExceptionAttributeMode +): AnyValue | null { + if (value == null) return null; + + if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') { + return value; + } + + if (Array.isArray(value)) { + let primitiveType: string | null = null; + let isHomogeneous = true; + + for (const item of value) { + if (item == null) continue; + const itemType = typeof item; + if (itemType !== 'string' && itemType !== 'number' && itemType !== 'boolean') { + isHomogeneous = false; + break; + } + if (primitiveType === null) { + primitiveType = itemType; + } else if (primitiveType !== itemType) { + isHomogeneous = false; + break; + } + } + + if (isHomogeneous && primitiveType !== null) { + return value as AnyValue; + } + + const converted: AnyValue[] = []; + for (const item of value) { + const convertedItem = convertValueToAnyValue(item, objectRenderer, exceptionMode); + if (convertedItem !== null) { + converted.push(convertedItem); + } + } + return converted; + } + + if (value instanceof Date) { + return value.toISOString(); + } + + if (value instanceof Error) { + const errorObj = serializeValue(value) as Record; + const converted: Record = {}; + for (const [key, val] of Object.entries(errorObj)) { + const convertedVal = convertValueToAnyValue(val, objectRenderer, exceptionMode); + if (convertedVal !== null) { + converted[key] = convertedVal; + } + } + return converted; + } + + if (typeof value === 'object') { + const proto = Object.getPrototypeOf(value); + const isPlainObject = proto === Object.prototype || proto === null; + + if (isPlainObject) { + const converted: 
Record = {}; + for (const [key, val] of Object.entries(value as Record)) { + const convertedVal = convertValueToAnyValue(val, objectRenderer, exceptionMode); + if (convertedVal !== null) { + converted[key] = convertedVal; + } + } + return converted; + } + + if (objectRenderer === 'inspect') { + return nodeInspect(value); + } + return JSON.stringify(value); + } + + return String(value); +} + +function convertToAttributes( + properties: Record, + objectRenderer: ObjectRenderer, + exceptionMode: ExceptionAttributeMode +): Record { + const attributes: Record = {}; + for (const [name, value] of Object.entries(properties)) { + if (value == null) continue; + + if (value instanceof Error && exceptionMode === 'semconv') { + attributes['exception.type'] = value.name; + attributes['exception.message'] = value.message; + if (typeof value.stack === 'string') { + attributes['exception.stacktrace'] = value.stack; + } + continue; + } + + const convertedValue = convertValueToAnyValue(value, objectRenderer, exceptionMode); + if (convertedValue !== null) { + attributes[name] = convertedValue; + } + } + return attributes; +} + +function serializeValue(value: unknown): unknown { + if (value instanceof Error) { + const serialized: Record = { + name: value.name, + message: value.message + }; + + if (typeof value.stack === 'string') { + serialized.stack = value.stack; + } + + const cause = (value as { cause?: unknown }).cause; + if (cause !== undefined) { + serialized.cause = serializeValue(cause); + } + + if (typeof AggregateError !== 'undefined' && value instanceof AggregateError) { + serialized.errors = value.errors.map(serializeValue); + } + + for (const key of Object.keys(value)) { + if (!(key in serialized)) { + serialized[key] = serializeValue((value as unknown as Record)[key]); + } + } + + return serialized; + } + + if (Array.isArray(value)) { + return value.map(serializeValue); + } + + if (value !== null && typeof value === 'object') { + const serialized: Record = {}; + for 
(const [key, val] of Object.entries(value)) { + serialized[key] = serializeValue(val); + } + return serialized; + } + + return value; +} + +function convertToString( + value: unknown, + objectRenderer: ObjectRenderer, + exceptionMode: ExceptionAttributeMode +): string | null | undefined { + if (value === null || value === undefined || typeof value === 'string') { + return value; + } + if (objectRenderer === 'inspect') return nodeInspect(value); + if (typeof value === 'number' || typeof value === 'boolean') { + return value.toString(); + } + if (value instanceof Date) return value.toISOString(); + if (value instanceof Error && (exceptionMode === 'raw' || exceptionMode === 'semconv')) { + return JSON.stringify(serializeValue(value)); + } + return JSON.stringify(value); +} + +function convertMessageToArray( + message: readonly unknown[], + objectRenderer: ObjectRenderer, + exceptionMode: ExceptionAttributeMode +): AnyValue { + const body: (string | null | undefined)[] = []; + for (let i = 0; i < message.length; i += 2) { + const msg = message[i] as string; + body.push(msg); + if (message.length <= i + 1) break; + const val = message[i + 1]; + body.push(convertToString(val, objectRenderer, exceptionMode)); + } + return body; +} + +function convertMessageToString( + message: readonly unknown[], + objectRenderer: ObjectRenderer, + exceptionMode: ExceptionAttributeMode +): AnyValue { + let body = ''; + for (let i = 0; i < message.length; i += 2) { + const msg = message[i] as string; + body += msg; + if (message.length <= i + 1) break; + const val = message[i + 1]; + const extra = convertToString(val, objectRenderer, exceptionMode); + body += extra ?? 
JSON.stringify(extra); + } + return body; +} + +function convertMessageToCustomBodyFormat( + message: readonly unknown[], + objectRenderer: ObjectRenderer, + exceptionMode: ExceptionAttributeMode, + bodyFormatter: BodyFormatter +): AnyValue { + const body = message.map((msg) => convertToString(msg, objectRenderer, exceptionMode)); + return bodyFormatter(body); +} + +class DiagLoggerAdaptor implements DiagLogger { + logger: Logger; + + constructor() { + this.logger = getLogger(['logtape', 'meta', 'otel']); + } + + #escape(msg: string): string { + return msg.replaceAll('{', '{{').replaceAll('}', '}}'); + } + + error(msg: string, ...values: unknown[]): void { + this.logger.error(`${this.#escape(msg)}: {values}`, { values }); + } + + warn(msg: string, ...values: unknown[]): void { + this.logger.warn(`${this.#escape(msg)}: {values}`, { values }); + } + + info(msg: string, ...values: unknown[]): void { + this.logger.info(`${this.#escape(msg)}: {values}`, { values }); + } + + debug(msg: string, ...values: unknown[]): void { + this.logger.debug(`${this.#escape(msg)}: {values}`, { values }); + } + + verbose(msg: string, ...values: unknown[]): void { + this.logger.debug(`${this.#escape(msg)}: {values}`, { values }); + } +} diff --git a/sdk/otel/src/logger/sinks.ts b/sdk/otel/src/logger/sinks.ts new file mode 100644 index 0000000000..50fd723d3e --- /dev/null +++ b/sdk/otel/src/logger/sinks.ts @@ -0,0 +1,149 @@ +import type { LogLevel, LogRecord } from '@logtape/logtape'; +import { getConsoleSink, withFilter } from '@logtape/logtape'; +import { getPrettyFormatter } from '@logtape/pretty'; +import { mapLevelToSeverityNumber } from './helpers'; +import { getOpenTelemetrySink } from './otel'; +import type { + ConsoleLoggerOptions, + LogTapeConfig, + LoggerConfigureOptions, + LoggerSinkId, + OtelLoggerOptions +} from './types'; + +type SinkConfig = LogTapeConfig['sinks']; + +type CreateSinksOptions = Pick; + +type CreateSinksResult = { + sinks: SinkConfig; + composedSinks: 
LoggerSinkId[]; +}; + +const defaultConsoleOptions: Required = { + enabled: true, + level: 'trace' +}; + +const defaultOtelOptions = { + enabled: false, + level: 'info' as LogLevel +}; + +function normalizeConsoleOptions( + options?: boolean | ConsoleLoggerOptions +): Required { + if (typeof options === 'boolean') { + return { + ...defaultConsoleOptions, + enabled: options + }; + } + + return { + enabled: options?.enabled ?? defaultConsoleOptions.enabled, + level: options?.level ?? defaultConsoleOptions.level + }; +} + +function normalizeOtelOptions(options?: false | OtelLoggerOptions) { + if (!options) { + return { + ...defaultOtelOptions, + serviceName: undefined, + url: undefined, + loggerName: undefined + }; + } + + return { + enabled: options.enabled ?? true, + level: options.level ?? defaultOtelOptions.level, + serviceName: options.serviceName, + url: options.url, + loggerName: options.loggerName ?? options.serviceName + }; +} + +function pad(value: number) { + return value.toString().padStart(2, '0'); +} + +function formatTimestamp(timestamp: number | Date) { + const date = timestamp instanceof Date ? timestamp : new Date(timestamp); + + return `${date.getFullYear()}-${pad(date.getMonth() + 1)}-${pad(date.getDate())} ${pad( + date.getHours() + )}:${pad(date.getMinutes())}:${pad(date.getSeconds())}`; +} + +export async function createSinks(options: CreateSinksOptions): Promise { + const consoleOptions = normalizeConsoleOptions(options.console); + const otelOptions = normalizeOtelOptions(options.otel); + const sensitiveProperties = options.sensitiveProperties ?? 
[]; + + const sinkConfig = { + bufferSize: 8192, + flushInterval: 5000, + nonBlocking: true, + lazy: true + } as const; + + const sinks: SinkConfig = {}; + const composedSinks: LoggerSinkId[] = []; + + const levelFilter = (record: LogRecord, level: LogLevel) => { + return mapLevelToSeverityNumber(record.level) >= mapLevelToSeverityNumber(level); + }; + + if (consoleOptions.enabled) { + sinks.console = withFilter( + getConsoleSink({ + ...sinkConfig, + formatter: getPrettyFormatter({ + icons: false, + level: 'ABBR', + wordWrap: false, + messageColor: null, + categoryColor: null, + timestampColor: null, + levelStyle: 'reset', + messageStyle: 'reset', + categoryStyle: 'reset', + timestampStyle: 'reset', + categorySeparator: ':', + timestamp: formatTimestamp, + inspectOptions: { depth: 5 } + }) + }), + (record) => levelFilter(record, consoleOptions.level) + ); + composedSinks.push('console'); + } + + if (otelOptions.enabled) { + if (!otelOptions.serviceName) { + throw new Error('`otel.serviceName` is required when OpenTelemetry logging is enabled'); + } + + sinks.otel = withFilter( + getOpenTelemetrySink({ + serviceName: otelOptions.serviceName, + loggerName: otelOptions.loggerName, + otlpExporterConfig: otelOptions.url ? { url: otelOptions.url } : undefined + }), + (record) => { + const properties = record.properties ?? 
{}; + + return ( + levelFilter(record, otelOptions.level) && + !sensitiveProperties.some((property) => property in properties) + ); + } + ); + + composedSinks.push('otel'); + } + + return { sinks, composedSinks }; +} diff --git a/sdk/otel/src/logger/types.ts b/sdk/otel/src/logger/types.ts new file mode 100644 index 0000000000..b8d259a72e --- /dev/null +++ b/sdk/otel/src/logger/types.ts @@ -0,0 +1,37 @@ +import type { AsyncLocalStorage } from 'node:async_hooks'; +import type { Config, LogLevel } from '@logtape/logtape'; + +export type LogCategory = readonly string[]; +export type LoggerContext = Record; +export type LoggerSinkId = 'console' | 'otel'; + +type FilterId = string; + +export type LogTapeConfig = Config< + S, + F +>; + +export type LoggerConfig = LogTapeConfig['loggers']; + +export type ConsoleLoggerOptions = { + enabled?: boolean; + level?: LogLevel; +}; + +export type OtelLoggerOptions = { + enabled?: boolean; + level?: LogLevel; + serviceName: string; + url?: string; + loggerName?: string; +}; + +export type LoggerConfigureOptions = { + console?: boolean | ConsoleLoggerOptions; + otel?: false | OtelLoggerOptions; + contextLocalStorage?: AsyncLocalStorage; + loggers?: LoggerConfig; + sensitiveProperties?: readonly string[]; + defaultCategory?: LogCategory; +}; diff --git a/sdk/otel/src/metrics-entry.ts b/sdk/otel/src/metrics-entry.ts new file mode 100644 index 0000000000..03a04202cc --- /dev/null +++ b/sdk/otel/src/metrics-entry.ts @@ -0,0 +1 @@ +export * from './metrics'; diff --git a/sdk/otel/src/metrics/client.ts b/sdk/otel/src/metrics/client.ts new file mode 100644 index 0000000000..7aa4eb9090 --- /dev/null +++ b/sdk/otel/src/metrics/client.ts @@ -0,0 +1,138 @@ +import { metrics } from '@opentelemetry/api'; +import { OTLPMetricExporter } from '@opentelemetry/exporter-metrics-otlp-http'; +import { defaultResource, resourceFromAttributes } from '@opentelemetry/resources'; +import { MeterProvider, PeriodicExportingMetricReader } from 
'@opentelemetry/sdk-metrics'; +import { ATTR_SERVICE_NAME } from '@opentelemetry/semantic-conventions'; +import type { MetricsConfigureOptions, MetricsOptions } from './types'; + +type OtlpMetricExporterConfig = ConstructorParameters[0]; + +let configured = false; +let configurePromise: Promise | null = null; +let meterProvider: MeterProvider | null = null; +let defaultMeterName = 'fastgpt'; +let defaultMeterVersion: string | undefined; + +function getEnvironmentVariable(name: string): string | undefined { + return process.env[name]; +} + +function hasOtlpEndpoint(config?: OtlpMetricExporterConfig): boolean { + if (config?.url) return true; + if (getEnvironmentVariable('OTEL_EXPORTER_OTLP_METRICS_ENDPOINT')) return true; + if (getEnvironmentVariable('OTEL_EXPORTER_OTLP_ENDPOINT')) return true; + return false; +} + +function normalizeOtlpMetricsUrl(url: string) { + const trimmed = url.trim(); + if (!trimmed) return trimmed; + if (trimmed.endsWith('/v1/metrics')) return trimmed; + return `${trimmed.replace(/\/+$/, '')}/v1/metrics`; +} + +function resolveOtlpMetricsUrl(config?: OtlpMetricExporterConfig) { + if (config?.url) return config.url; + + const metricsEndpoint = getEnvironmentVariable('OTEL_EXPORTER_OTLP_METRICS_ENDPOINT'); + if (metricsEndpoint) return metricsEndpoint; + + const endpoint = getEnvironmentVariable('OTEL_EXPORTER_OTLP_ENDPOINT'); + if (endpoint) return normalizeOtlpMetricsUrl(endpoint); + + return undefined; +} + +function normalizeMetricsOptions(options?: false | MetricsOptions) { + if (options === false) { + return { + enabled: false, + exportIntervalMillis: 15000 + }; + } + + return { + enabled: options?.enabled ?? false, + serviceName: options?.serviceName, + exportIntervalMillis: options?.exportIntervalMillis ?? 15000, + otlpExporterConfig: { + url: options?.url, + headers: options?.headers + } satisfies OtlpMetricExporterConfig, + additionalResource: options?.additionalResource ?? 
null + }; +} + +export async function configureMetrics(options: MetricsConfigureOptions = {}) { + if (configured) return; + if (configurePromise) return configurePromise; + + configurePromise = (async () => { + const metricsOptions = normalizeMetricsOptions(options.metrics); + defaultMeterName = options.defaultMeterName ?? defaultMeterName; + defaultMeterVersion = options.defaultMeterVersion ?? defaultMeterVersion; + + const resource = defaultResource().merge( + resourceFromAttributes({ + [ATTR_SERVICE_NAME]: + metricsOptions.serviceName ?? + getEnvironmentVariable('OTEL_SERVICE_NAME') ?? + defaultMeterName + }).merge(metricsOptions.additionalResource ?? null) + ); + + const readers: PeriodicExportingMetricReader[] = []; + + if (metricsOptions.enabled && hasOtlpEndpoint(metricsOptions.otlpExporterConfig)) { + const exporter = new OTLPMetricExporter({ + ...metricsOptions.otlpExporterConfig, + url: resolveOtlpMetricsUrl(metricsOptions.otlpExporterConfig) + }); + + readers.push( + new PeriodicExportingMetricReader({ + exporter, + exportIntervalMillis: metricsOptions.exportIntervalMillis + }) + ); + } + + meterProvider = new MeterProvider({ + resource, + readers + }); + metrics.setGlobalMeterProvider(meterProvider); + + configured = true; + })(); + + try { + await configurePromise; + } catch (error) { + configurePromise = null; + throw error; + } +} + +export async function disposeMetrics() { + if (configurePromise) { + try { + await configurePromise; + } catch { + configurePromise = null; + return; + } + } + + if (!configured || !meterProvider) return; + + await meterProvider.shutdown(); + + configured = false; + configurePromise = null; + meterProvider = null; +} + +export function getMeter(name = defaultMeterName, version = defaultMeterVersion) { + return metrics.getMeter(name, version); +} diff --git a/sdk/otel/src/metrics/env.ts b/sdk/otel/src/metrics/env.ts new file mode 100644 index 0000000000..367df9fa33 --- /dev/null +++ b/sdk/otel/src/metrics/env.ts @@ -0,0 
+1,52 @@ +import { configureMetrics } from './client'; +import { parseBooleanEnv, parsePositiveNumberEnv, parseStringEnv } from '../env-utils'; +import type { MetricsConfigureOptions } from './types'; + +export type MetricsEnvValue = string | boolean | number | undefined; +export type MetricsEnv = Record; + +export type MetricsConfigureFromEnvOptions = { + env?: MetricsEnv; + defaultServiceName?: string; + defaultMeterName?: string; + defaultMetricsEnabled?: boolean; + defaultMetricsUrl?: string; + defaultExportIntervalMillis?: number; +}; + +export function createMetricsOptionsFromEnv( + options: MetricsConfigureFromEnvOptions = {} +): MetricsConfigureOptions { + const env = options.env ?? process.env; + + const metricsExporter = parseStringEnv(env.OTEL_METRICS_EXPORTER)?.toLowerCase(); + const enabled = parseBooleanEnv( + env.METRICS_ENABLE_OTEL, + metricsExporter === 'otlp' || options.defaultMetricsEnabled === true + ); + + return { + defaultMeterName: options.defaultMeterName ?? options.defaultServiceName ?? 'fastgpt', + metrics: enabled + ? { + enabled: true, + serviceName: + parseStringEnv(env.METRICS_OTEL_SERVICE_NAME) ?? + parseStringEnv(env.OTEL_SERVICE_NAME) ?? + options.defaultServiceName, + url: + parseStringEnv(env.METRICS_OTEL_URL) ?? + parseStringEnv(env.OTEL_EXPORTER_OTLP_METRICS_ENDPOINT) ?? + options.defaultMetricsUrl, + exportIntervalMillis: parsePositiveNumberEnv( + env.METRICS_EXPORT_INTERVAL ?? env.OTEL_METRIC_EXPORT_INTERVAL, + options.defaultExportIntervalMillis ?? 
15000 + ) + } + : false + }; +} + +export async function configureMetricsFromEnv(options: MetricsConfigureFromEnvOptions = {}) { + return configureMetrics(createMetricsOptionsFromEnv(options)); +} diff --git a/sdk/otel/src/metrics/index.ts b/sdk/otel/src/metrics/index.ts new file mode 100644 index 0000000000..a3da8e2b80 --- /dev/null +++ b/sdk/otel/src/metrics/index.ts @@ -0,0 +1,4 @@ +export { configureMetrics, disposeMetrics, getMeter } from './client'; +export { configureMetricsFromEnv, createMetricsOptionsFromEnv } from './env'; +export type { MetricsConfigureOptions, MetricsOptions, MetricAttributes } from './types'; +export type { MetricsConfigureFromEnvOptions, MetricsEnv } from './env'; diff --git a/sdk/otel/src/metrics/types.ts b/sdk/otel/src/metrics/types.ts new file mode 100644 index 0000000000..98b9e24aaf --- /dev/null +++ b/sdk/otel/src/metrics/types.ts @@ -0,0 +1,19 @@ +import type { Resource } from '@opentelemetry/resources'; + +export type MetricsOptions = { + enabled?: boolean; + serviceName?: string; + url?: string; + headers?: Record; + exportIntervalMillis?: number; + additionalResource?: Resource | null; +}; + +export type MetricsConfigureOptions = { + defaultMeterName?: string; + defaultMeterVersion?: string; + metrics?: false | MetricsOptions; +}; + +export type MetricAttributeValue = string | number | boolean; +export type MetricAttributes = Record; diff --git a/sdk/otel/src/tracing-entry.ts b/sdk/otel/src/tracing-entry.ts new file mode 100644 index 0000000000..9e9f86336d --- /dev/null +++ b/sdk/otel/src/tracing-entry.ts @@ -0,0 +1,3 @@ +export { configureTracing, disposeTracing, getCurrentSpanContext, getTracer } from './tracing'; +export { configureTracingFromEnv, createTracingOptionsFromEnv } from './tracing'; +export type * from './tracing'; diff --git a/sdk/otel/src/tracing/client.ts b/sdk/otel/src/tracing/client.ts new file mode 100644 index 0000000000..d59d49aec6 --- /dev/null +++ b/sdk/otel/src/tracing/client.ts @@ -0,0 +1,162 @@ 
+import { trace } from '@opentelemetry/api'; +import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-http'; +import { defaultResource, resourceFromAttributes } from '@opentelemetry/resources'; +import { + BatchSpanProcessor, + ParentBasedSampler, + TraceIdRatioBasedSampler +} from '@opentelemetry/sdk-trace-base'; +import { NodeTracerProvider } from '@opentelemetry/sdk-trace-node'; +import { ATTR_SERVICE_NAME } from '@opentelemetry/semantic-conventions'; +import type { TracingConfigureOptions, TracingOptions } from './types'; + +type OtlpTraceExporterConfig = ConstructorParameters[0]; + +let configured = false; +let configurePromise: Promise | null = null; +let tracerProvider: NodeTracerProvider | null = null; +let defaultTracerName = 'fastgpt'; +let defaultTracerVersion: string | undefined; + +function getEnvironmentVariable(name: string): string | undefined { + return process.env[name]; +} + +function hasOtlpEndpoint(config?: OtlpTraceExporterConfig): boolean { + if (config?.url) return true; + if (getEnvironmentVariable('OTEL_EXPORTER_OTLP_TRACES_ENDPOINT')) return true; + if (getEnvironmentVariable('OTEL_EXPORTER_OTLP_ENDPOINT')) return true; + return false; +} + +function normalizeOtlpTracesUrl(url: string) { + const trimmed = url.trim(); + if (!trimmed) return trimmed; + if (trimmed.endsWith('/v1/traces')) return trimmed; + return `${trimmed.replace(/\/+$/, '')}/v1/traces`; +} + +function resolveOtlpTracesUrl(config?: OtlpTraceExporterConfig) { + if (config?.url) return config.url; + + const tracesEndpoint = getEnvironmentVariable('OTEL_EXPORTER_OTLP_TRACES_ENDPOINT'); + if (tracesEndpoint) return tracesEndpoint; + + const endpoint = getEnvironmentVariable('OTEL_EXPORTER_OTLP_ENDPOINT'); + if (endpoint) return normalizeOtlpTracesUrl(endpoint); + + return undefined; +} + +function normalizeSampleRatio(value: number | undefined, defaultValue: number) { + if (typeof value !== 'number' || !Number.isFinite(value)) return defaultValue; + return 
Math.max(0, Math.min(1, value)); +} + +function normalizeTracingOptions(options?: false | TracingOptions) { + if (options === false) { + return { + enabled: false, + sampleRatio: 1 + }; + } + + return { + enabled: options?.enabled ?? false, + serviceName: options?.serviceName, + sampleRatio: normalizeSampleRatio(options?.sampleRatio, 1), + otlpExporterConfig: { + url: options?.url, + headers: options?.headers + } satisfies OtlpTraceExporterConfig, + additionalResource: options?.additionalResource ?? null + }; +} + +export async function configureTracing(options: TracingConfigureOptions = {}) { + if (configured) return; + if (configurePromise) return configurePromise; + + configurePromise = (async () => { + const tracingOptions = normalizeTracingOptions(options.tracing); + defaultTracerName = options.defaultTracerName ?? defaultTracerName; + defaultTracerVersion = options.defaultTracerVersion ?? defaultTracerVersion; + + if (!tracingOptions.enabled) { + configured = true; + return; + } + + const resource = defaultResource().merge( + resourceFromAttributes({ + [ATTR_SERVICE_NAME]: + tracingOptions.serviceName ?? + getEnvironmentVariable('OTEL_SERVICE_NAME') ?? + defaultTracerName + }).merge(tracingOptions.additionalResource ?? 
null) + ); + + const spanProcessors = []; + + if (hasOtlpEndpoint(tracingOptions.otlpExporterConfig)) { + const exporter = new OTLPTraceExporter({ + ...tracingOptions.otlpExporterConfig, + url: resolveOtlpTracesUrl(tracingOptions.otlpExporterConfig) + }); + + spanProcessors.push(new BatchSpanProcessor(exporter)); + } + + tracerProvider = new NodeTracerProvider({ + resource, + sampler: new ParentBasedSampler({ + root: new TraceIdRatioBasedSampler(tracingOptions.sampleRatio) + }), + spanProcessors + }); + + tracerProvider.register(); + + configured = true; + })(); + + try { + await configurePromise; + } catch (error) { + configurePromise = null; + throw error; + } +} + +export async function disposeTracing() { + if (configurePromise) { + try { + await configurePromise; + } catch { + configurePromise = null; + return; + } + } + + if (!configured) return; + + if (!tracerProvider) { + configured = false; + configurePromise = null; + return; + } + + await tracerProvider.shutdown(); + + configured = false; + configurePromise = null; + tracerProvider = null; +} + +export function getTracer(name = defaultTracerName, version = defaultTracerVersion) { + return trace.getTracer(name, version); +} + +export function getCurrentSpanContext() { + return trace.getActiveSpan()?.spanContext(); +} diff --git a/sdk/otel/src/tracing/env.ts b/sdk/otel/src/tracing/env.ts new file mode 100644 index 0000000000..c4a91cdefd --- /dev/null +++ b/sdk/otel/src/tracing/env.ts @@ -0,0 +1,74 @@ +import { configureTracing } from './client'; +import { parseBooleanEnv, parseNumberEnv, parseStringEnv } from '../env-utils'; +import type { TracingConfigureOptions } from './types'; + +export type TracingEnvValue = string | boolean | number | undefined; +export type TracingEnv = Record; + +export type TracingConfigureFromEnvOptions = { + env?: TracingEnv; + defaultServiceName?: string; + defaultTracerName?: string; + defaultTracingEnabled?: boolean; + defaultTracingUrl?: string; + defaultSampleRatio?: 
number; +}; + +function normalizeSampleRatio(value: number, defaultValue: number) { + if (!Number.isFinite(value)) return defaultValue; + return Math.max(0, Math.min(1, value)); +} + +function getSampleRatioFromStandardEnv(env: TracingEnv, defaultValue: number): number { + const sampler = parseStringEnv(env.OTEL_TRACES_SAMPLER)?.toLowerCase(); + const samplerArg = normalizeSampleRatio( + parseNumberEnv(env.OTEL_TRACES_SAMPLER_ARG, defaultValue), + defaultValue + ); + + if (sampler === 'always_off' || sampler === 'parentbased_always_off') return 0; + if (sampler === 'always_on' || sampler === 'parentbased_always_on') return 1; + if (sampler === 'traceidratio' || sampler === 'parentbased_traceidratio') { + return samplerArg; + } + + return defaultValue; +} + +export function createTracingOptionsFromEnv( + options: TracingConfigureFromEnvOptions = {} +): TracingConfigureOptions { + const env = options.env ?? process.env; + + const tracesExporter = parseStringEnv(env.OTEL_TRACES_EXPORTER)?.toLowerCase(); + const enabled = parseBooleanEnv( + env.TRACING_ENABLE_OTEL, + tracesExporter === 'otlp' || options.defaultTracingEnabled === true + ); + const defaultSampleRatio = normalizeSampleRatio(options.defaultSampleRatio ?? 1, 1); + + return { + defaultTracerName: options.defaultTracerName ?? options.defaultServiceName ?? 'fastgpt', + tracing: enabled + ? { + enabled: true, + serviceName: + parseStringEnv(env.TRACING_OTEL_SERVICE_NAME) ?? + parseStringEnv(env.OTEL_SERVICE_NAME) ?? + options.defaultServiceName, + url: + parseStringEnv(env.TRACING_OTEL_URL) ?? + parseStringEnv(env.OTEL_EXPORTER_OTLP_TRACES_ENDPOINT) ?? 
+ options.defaultTracingUrl, + sampleRatio: normalizeSampleRatio( + parseNumberEnv(env.TRACING_OTEL_SAMPLE_RATIO, NaN), + getSampleRatioFromStandardEnv(env, defaultSampleRatio) + ) + } + : false + }; +} + +export async function configureTracingFromEnv(options: TracingConfigureFromEnvOptions = {}) { + return configureTracing(createTracingOptionsFromEnv(options)); +} diff --git a/sdk/otel/src/tracing/index.ts b/sdk/otel/src/tracing/index.ts new file mode 100644 index 0000000000..71252f0a4d --- /dev/null +++ b/sdk/otel/src/tracing/index.ts @@ -0,0 +1,4 @@ +export { configureTracing, disposeTracing, getCurrentSpanContext, getTracer } from './client'; +export { configureTracingFromEnv, createTracingOptionsFromEnv } from './env'; +export type * from './types'; +export type * from './env'; diff --git a/sdk/otel/src/tracing/types.ts b/sdk/otel/src/tracing/types.ts new file mode 100644 index 0000000000..8212e21907 --- /dev/null +++ b/sdk/otel/src/tracing/types.ts @@ -0,0 +1,19 @@ +import type { Resource } from '@opentelemetry/resources'; + +export type TracingOptions = { + enabled?: boolean; + serviceName?: string; + url?: string; + headers?: Record; + sampleRatio?: number; + additionalResource?: Resource | null; +}; + +export type TracingConfigureOptions = { + defaultTracerName?: string; + defaultTracerVersion?: string; + tracing?: false | TracingOptions; +}; + +export type TraceAttributeValue = string | number | boolean; +export type TraceAttributes = Record; diff --git a/sdk/otel/src/types.ts b/sdk/otel/src/types.ts new file mode 100644 index 0000000000..679cc0eb14 --- /dev/null +++ b/sdk/otel/src/types.ts @@ -0,0 +1,9 @@ +import type { LoggerConfigureOptions } from './logger'; +import type { MetricsConfigureOptions } from './metrics'; +import type { TracingConfigureOptions } from './tracing'; + +export type OtelConfigureOptions = { + logger?: LoggerConfigureOptions; + metrics?: MetricsConfigureOptions; + tracing?: TracingConfigureOptions; +}; diff --git 
a/sdk/otel/tsconfig.json b/sdk/otel/tsconfig.json new file mode 100644 index 0000000000..cabc973ce7 --- /dev/null +++ b/sdk/otel/tsconfig.json @@ -0,0 +1,20 @@ +{ + "compilerOptions": { + "module": "esnext", + "target": "es2022", + "moduleResolution": "bundler", + + "sourceMap": true, + "declaration": true, + "declarationMap": true, + + "strict": true, + "verbatimModuleSyntax": true, + "isolatedModules": true, + "noUncheckedSideEffectImports": true, + "moduleDetection": "force", + "skipLibCheck": true, + + "noEmit": true + } +} diff --git a/sdk/otel/tsdown.config.ts b/sdk/otel/tsdown.config.ts new file mode 100644 index 0000000000..ce09943678 --- /dev/null +++ b/sdk/otel/tsdown.config.ts @@ -0,0 +1,10 @@ +import { defineConfig } from 'tsdown'; + +export default defineConfig({ + entry: ['src/index.ts', 'src/logger-entry.ts', 'src/metrics-entry.ts', 'src/tracing-entry.ts'], + format: 'esm', + dts: { + enabled: true, + sourcemap: false + } +}); diff --git a/test/cases/global/core/workflow/dispatch/checkNodeRunStatus.test.ts b/test/cases/global/core/workflow/dispatch/checkNodeRunStatus.test.ts deleted file mode 100644 index 5c20749874..0000000000 --- a/test/cases/global/core/workflow/dispatch/checkNodeRunStatus.test.ts +++ /dev/null @@ -1,2347 +0,0 @@ -import { describe, it, expect } from 'vitest'; -import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant'; -import type { RuntimeEdgeItemType } from '@fastgpt/global/core/workflow/type/edge'; -import { WorkflowQueue } from '@fastgpt/service/core/workflow/dispatch/index'; -import { createNode, createEdge } from '../utils'; - -/** - * 测试目标:验证节点运行状态判断是否正确 - * - * 测试方法: - * 1. 构建工作流图(节点 + 边) - * 2. 使用 buildNodeEdgeGroupsMap 构建边分组 - * 3. 模拟不同的边状态(active/waiting/skipped) - * 4. 使用 getNodeRunStatus 判断节点状态 - * 5. 
验证节点状态是否符合预期 - */ - -describe('checkNodeRunStatus', () => { - // 辅助函数:设置边状态 - const setEdgeStatus = ( - edges: RuntimeEdgeItemType[], - source: string, - target: string, - status: 'active' | 'waiting' | 'skipped' - ) => { - const edge = edges.find((e) => e.source === source && e.target === target); - if (edge) { - edge.status = status; - } - }; - - describe('场景1: 简单分支汇聚', () => { - /** - * 工作流结构: - * - * ┌─ if ──→ B ──┐ - * start → A → D - * └─ else → C ──┘ - * - * 预期分组: - * - D: 组1[B→D], 组2[C→D] - * - * 测试场景: - * 1. A 走 if 分支:B→D active, C→D skipped → D 应该运行 - * 2. A 走 else 分支:C→D active, B→D skipped → D 应该运行 - */ - - const nodes = [ - createNode('start', FlowNodeTypeEnum.workflowStart), - createNode('A', FlowNodeTypeEnum.ifElseNode), - createNode('B', FlowNodeTypeEnum.chatNode), - createNode('C', FlowNodeTypeEnum.chatNode), - createNode('D', FlowNodeTypeEnum.chatNode) - ]; - - const edges = [ - createEdge('start', 'A'), - createEdge('A', 'B', 'waiting', 'A-source-if'), - createEdge('A', 'C', 'waiting', 'A-source-else'), - createEdge('B', 'D'), - createEdge('C', 'D') - ]; - - const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); - const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ - runtimeNodes: nodes, - edgeIndex - }); - - it('D 节点应该分成 2 组', () => { - const groups = edgeGroupsMap.get('D') || []; - expect(groups.length).toBe(1); - }); - - it('场景1.1: A 走 if 分支,D 应该运行', () => { - // 设置边状态 - setEdgeStatus(edges, 'start', 'A', 'active'); - setEdgeStatus(edges, 'A', 'B', 'active'); - setEdgeStatus(edges, 'A', 'C', 'skipped'); - setEdgeStatus(edges, 'B', 'D', 'active'); - setEdgeStatus(edges, 'C', 'D', 'skipped'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'D')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景1.2: A 走 else 分支,D 应该运行', () => { - // 设置边状态 - setEdgeStatus(edges, 'start', 'A', 'active'); - setEdgeStatus(edges, 'A', 'B', 'skipped'); - 
setEdgeStatus(edges, 'A', 'C', 'active'); - setEdgeStatus(edges, 'B', 'D', 'skipped'); - setEdgeStatus(edges, 'C', 'D', 'active'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'D')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景1.3: B 还在执行中,D 应该等待', () => { - // 设置边状态 - setEdgeStatus(edges, 'start', 'A', 'active'); - setEdgeStatus(edges, 'A', 'B', 'active'); - setEdgeStatus(edges, 'A', 'C', 'skipped'); - setEdgeStatus(edges, 'B', 'D', 'waiting'); - setEdgeStatus(edges, 'C', 'D', 'skipped'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'D')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('wait'); - }); - }); - - describe('场景2: 简单循环', () => { - /** - * 工作流结构: - * - * start → A → B → C → A - * - * 预期分组: - * - A: 组1[start→A], 组2[C→A] - * - * 测试场景: - * 1. 第一次执行:start→A active → A 应该运行 - * 2. 循环执行:C→A active → A 应该运行 - * 3. 两条边都 waiting → A 应该等待 - */ - - const nodes = [ - createNode('start', FlowNodeTypeEnum.workflowStart), - createNode('A', FlowNodeTypeEnum.chatNode), - createNode('B', FlowNodeTypeEnum.chatNode), - createNode('C', FlowNodeTypeEnum.chatNode) - ]; - - const edges = [ - createEdge('start', 'A'), - createEdge('A', 'B'), - createEdge('B', 'C'), - createEdge('C', 'A') - ]; - - const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); - const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ - runtimeNodes: nodes, - edgeIndex - }); - - it('A 节点应该分成 2 组', () => { - const groups = edgeGroupsMap.get('A') || []; - expect(groups.length).toBe(2); - }); - - it('场景2.1: 第一次执行,A 应该运行', () => { - setEdgeStatus(edges, 'start', 'A', 'active'); - setEdgeStatus(edges, 'C', 'A', 'waiting'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'A')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景2.2: 循环执行,A 应该运行', () => { - 
setEdgeStatus(edges, 'start', 'A', 'skipped'); - setEdgeStatus(edges, 'C', 'A', 'active'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'A')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景2.3: 两条边都 waiting,A 应该等待', () => { - setEdgeStatus(edges, 'start', 'A', 'waiting'); - setEdgeStatus(edges, 'C', 'A', 'waiting'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'A')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('wait'); - }); - }); - - describe('场景3: 分支 + 循环', () => { - /** - * 工作流结构: - * - * ┌─ if ──→ B ──→ D ──→ F ──┐ - * start → A │ - * └─ else → C ──→ D │ - * │ - * A ←───────────────────────┘ - * - * 预期分组: - * - D: 组1[B→D], 组2[C→D] - * - A: 组1[start→A], 组2[F→A] - * - * 测试场景: - * 1. 第一次走 if 分支:B→D active, C→D skipped → D 应该运行 - * 2. 第一次走 else 分支:C→D active, B→D skipped → D 应该运行 - * 3. 循环回来:F→A active → A 应该运行 - */ - - const nodes = [ - createNode('start', FlowNodeTypeEnum.workflowStart), - createNode('A', FlowNodeTypeEnum.ifElseNode), - createNode('B', FlowNodeTypeEnum.chatNode), - createNode('C', FlowNodeTypeEnum.chatNode), - createNode('D', FlowNodeTypeEnum.chatNode), - createNode('F', FlowNodeTypeEnum.chatNode) - ]; - - const edges = [ - createEdge('start', 'A'), - createEdge('A', 'B', 'waiting', 'A-source-if'), - createEdge('A', 'C', 'waiting', 'A-source-else'), - createEdge('B', 'D'), - createEdge('C', 'D'), - createEdge('D', 'F'), - createEdge('F', 'A') - ]; - - const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); - const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ - runtimeNodes: nodes, - edgeIndex - }); - - it('D 节点应该分成 2 组', () => { - const groups = edgeGroupsMap.get('D') || []; - expect(groups.length).toBe(2); - }); - - it('A 节点应该分成 2 组', () => { - const groups = edgeGroupsMap.get('A') || []; - expect(groups.length).toBe(2); - }); - - it('场景3.1: 第一次走 if 分支,D 应该运行', () 
=> { - setEdgeStatus(edges, 'B', 'D', 'active'); - setEdgeStatus(edges, 'C', 'D', 'skipped'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'D')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景3.2: 第一次走 else 分支,D 应该运行', () => { - setEdgeStatus(edges, 'B', 'D', 'skipped'); - setEdgeStatus(edges, 'C', 'D', 'active'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'D')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景3.3: 循环回来,A 应该运行', () => { - setEdgeStatus(edges, 'start', 'A', 'skipped'); - setEdgeStatus(edges, 'F', 'A', 'active'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'A')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - }); - - describe('场景4: 并行汇聚(无分支节点)', () => { - /** - * 工作流结构: - * - * start ──→ A ──→ C - * └──→ B ──→ C - * - * 预期分组: - * - C: 组1[A→C, B→C] (合并成一组,因为没有分支节点) - * - * 测试场景: - * 1. A 和 B 都完成:A→C active, B→C active → C 应该运行 - * 2. 只有 A 完成:A→C active, B→C waiting → C 应该等待 - * 3. 
只有 B 完成:A→C waiting, B→C active → C 应该等待 - */ - - const nodes = [ - createNode('start', FlowNodeTypeEnum.workflowStart), - createNode('A', FlowNodeTypeEnum.chatNode), - createNode('B', FlowNodeTypeEnum.chatNode), - createNode('C', FlowNodeTypeEnum.chatNode) - ]; - - const edges = [ - createEdge('start', 'A'), - createEdge('start', 'B'), - createEdge('A', 'C'), - createEdge('B', 'C') - ]; - - const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); - const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ - runtimeNodes: nodes, - edgeIndex - }); - - it('C 节点应该只有 1 组', () => { - const groups = edgeGroupsMap.get('C') || []; - expect(groups.length).toBe(1); - expect(groups[0].length).toBe(2); // 两条边在同一组 - }); - - it('场景4.1: A 和 B 都完成,C 应该运行', () => { - setEdgeStatus(edges, 'A', 'C', 'active'); - setEdgeStatus(edges, 'B', 'C', 'active'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'C')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景4.2: 只有 A 完成,C 应该等待', () => { - setEdgeStatus(edges, 'A', 'C', 'active'); - setEdgeStatus(edges, 'B', 'C', 'waiting'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'C')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('wait'); - }); - - it('场景4.3: 只有 B 完成,C 应该等待', () => { - setEdgeStatus(edges, 'A', 'C', 'waiting'); - setEdgeStatus(edges, 'B', 'C', 'active'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'C')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('wait'); - }); - }); - - describe('场景5: 所有边都 skipped', () => { - /** - * 测试场景: - * 当节点的所有输入边都是 skipped 时,节点应该被跳过 - */ - - const nodes = [ - createNode('start', FlowNodeTypeEnum.workflowStart), - createNode('A', FlowNodeTypeEnum.ifElseNode), - createNode('B', FlowNodeTypeEnum.chatNode), - createNode('C', FlowNodeTypeEnum.chatNode), - createNode('D', 
FlowNodeTypeEnum.chatNode) - ]; - - const edges = [ - createEdge('start', 'A'), - createEdge('A', 'B', 'waiting', 'A-source-if'), - createEdge('A', 'C', 'waiting', 'A-source-else'), - createEdge('B', 'D'), - createEdge('C', 'D') - ]; - - const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); - const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ - runtimeNodes: nodes, - edgeIndex - }); - - it('所有边都 skipped,D 应该被跳过', () => { - setEdgeStatus(edges, 'B', 'D', 'skipped'); - setEdgeStatus(edges, 'C', 'D', 'skipped'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'D')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('skip'); - }); - }); - - describe('场景6: 多层分支嵌套', () => { - /** - * 工作流结构: - * - * ┌─ if ──→ B ─ if ──→ D ──┐ - * start ──→ A └─ else ─→ E ──┤ - * └─ else ─→ C ────────────→ F - * - * 预期分组: - * - F: 组1[C→F, D→F, E→F] - * - * 测试场景: - * 1. A 走 if → B 走 if:D→F active, 其他 skipped → F 应该运行 - * 2. A 走 if → B 走 else:E→F active, 其他 skipped → F 应该运行 - * 3. A 走 else:C→F active, 其他 skipped → F 应该运行 - * 4. 
部分边 waiting:至少一条边 waiting → F 应该等待 - */ - - const nodes = [ - createNode('start', FlowNodeTypeEnum.workflowStart), - createNode('A', FlowNodeTypeEnum.ifElseNode), - createNode('B', FlowNodeTypeEnum.ifElseNode), - createNode('C', FlowNodeTypeEnum.chatNode), - createNode('D', FlowNodeTypeEnum.chatNode), - createNode('E', FlowNodeTypeEnum.chatNode), - createNode('F', FlowNodeTypeEnum.chatNode) - ]; - - const edges = [ - createEdge('start', 'A'), - createEdge('A', 'B', 'waiting', 'A-source-if'), - createEdge('A', 'C', 'waiting', 'A-source-else'), - createEdge('B', 'D', 'waiting', 'B-source-if'), - createEdge('B', 'E', 'waiting', 'B-source-else'), - createEdge('C', 'F'), - createEdge('D', 'F'), - createEdge('E', 'F') - ]; - - const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); - const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ - runtimeNodes: nodes, - edgeIndex - }); - - it('F 节点应该有 1 组(3条边)', () => { - const groups = edgeGroupsMap.get('F') || []; - expect(groups.length).toBe(1); - expect(groups[0].length).toBe(3); - }); - - it('场景6.1: A→if, B→if 路径,F 应该运行', () => { - setEdgeStatus(edges, 'C', 'F', 'skipped'); - setEdgeStatus(edges, 'D', 'F', 'active'); - setEdgeStatus(edges, 'E', 'F', 'skipped'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'F')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景6.2: A→if, B→else 路径,F 应该运行', () => { - setEdgeStatus(edges, 'C', 'F', 'skipped'); - setEdgeStatus(edges, 'D', 'F', 'skipped'); - setEdgeStatus(edges, 'E', 'F', 'active'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'F')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景6.3: A→else 路径,F 应该运行', () => { - setEdgeStatus(edges, 'C', 'F', 'active'); - setEdgeStatus(edges, 'D', 'F', 'skipped'); - setEdgeStatus(edges, 'E', 'F', 'skipped'); - - const status = 
WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'F')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景6.4: D 还在执行中,F 应该等待', () => { - setEdgeStatus(edges, 'C', 'F', 'skipped'); - setEdgeStatus(edges, 'D', 'F', 'waiting'); - setEdgeStatus(edges, 'E', 'F', 'skipped'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'F')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('wait'); - }); - }); - - describe('场景7: 嵌套循环', () => { - /** - * 工作流结构: - * - * start ──→ A ──→ B ──→ C ──→ D - * ↑ ↑ | | - * | |_____| | - * |_________________| - * (内层循环) (外层循环) - * - * 预期分组: - * - A: 组1[start→A], 组2[D→A] - * - B: 组1[A→B], 组2[C→B] - * - * 测试场景: - * 1. 第一次执行:start→A active → A 应该运行 - * 2. 内层循环:C→B active → B 应该运行 - * 3. 外层循环:D→A active → A 应该运行 - * 4. 两条边都 waiting → 应该等待 - */ - - const nodes = [ - createNode('start', FlowNodeTypeEnum.workflowStart), - createNode('A', FlowNodeTypeEnum.chatNode), - createNode('B', FlowNodeTypeEnum.chatNode), - createNode('C', FlowNodeTypeEnum.chatNode), - createNode('D', FlowNodeTypeEnum.chatNode) - ]; - - const edges = [ - createEdge('start', 'A'), - createEdge('A', 'B'), - createEdge('B', 'C'), - createEdge('C', 'B'), // 内层循环 - createEdge('C', 'D'), - createEdge('D', 'A') // 外层循环 - ]; - - const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); - const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ - runtimeNodes: nodes, - edgeIndex - }); - - it('A 节点应该分成 2 组', () => { - const groups = edgeGroupsMap.get('A') || []; - expect(groups.length).toBe(2); - }); - - it('B 节点应该分成 2 组', () => { - const groups = edgeGroupsMap.get('B') || []; - expect(groups.length).toBe(2); - }); - - it('场景7.1: 第一次执行,A 应该运行', () => { - setEdgeStatus(edges, 'start', 'A', 'active'); - setEdgeStatus(edges, 'D', 'A', 'waiting'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'A')!, - 
nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景7.2: 内层循环执行,B 应该运行', () => { - setEdgeStatus(edges, 'A', 'B', 'skipped'); - setEdgeStatus(edges, 'C', 'B', 'active'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'B')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景7.3: 外层循环执行,A 应该运行', () => { - setEdgeStatus(edges, 'start', 'A', 'skipped'); - setEdgeStatus(edges, 'D', 'A', 'active'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'A')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景7.4: B 的两条边都 waiting,B 应该等待', () => { - setEdgeStatus(edges, 'A', 'B', 'waiting'); - setEdgeStatus(edges, 'C', 'B', 'waiting'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'B')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('wait'); - }); - }); - - describe('场景8: 多个独立循环汇聚', () => { - /** - * 工作流结构: - * - * start ──→ A ──→ B ──→ E - * ↑ | ↑ - * |____| | - * | | - * └──→ C ──→ D - * ↑ | - * |_____| - * - * 预期分组: - * - A: 组1[start→A], 组2[B→A] - * - C: 组1[A→C], 组2[D→C] - * - E: 组1[B→E, D→E] - * - * 测试场景: - * 1. 两个循环都完成:B→E active, D→E active → E 应该运行 - * 2. 只有循环1完成:B→E active, D→E waiting → E 应该等待 - * 3. 
只有循环2完成:B→E waiting, D→E active → E 应该等待 - */ - - const nodes = [ - createNode('start', FlowNodeTypeEnum.workflowStart), - createNode('A', FlowNodeTypeEnum.chatNode), - createNode('B', FlowNodeTypeEnum.chatNode), - createNode('C', FlowNodeTypeEnum.chatNode), - createNode('D', FlowNodeTypeEnum.chatNode), - createNode('E', FlowNodeTypeEnum.chatNode) - ]; - - const edges = [ - createEdge('start', 'A'), - createEdge('A', 'B'), - createEdge('B', 'A'), // 循环1 - createEdge('A', 'C'), - createEdge('C', 'D'), - createEdge('D', 'C'), // 循环2 - createEdge('B', 'E'), - createEdge('D', 'E') - ]; - - const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); - const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ - runtimeNodes: nodes, - edgeIndex - }); - - it('E 节点应该有 1 组(2条边)', () => { - const groups = edgeGroupsMap.get('E') || []; - expect(groups.length).toBe(1); - expect(groups[0].length).toBe(2); - }); - - it('场景8.1: 两个循环都完成,E 应该运行', () => { - setEdgeStatus(edges, 'B', 'E', 'active'); - setEdgeStatus(edges, 'D', 'E', 'active'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'E')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景8.2: 只有循环1完成,E 应该等待', () => { - setEdgeStatus(edges, 'B', 'E', 'active'); - setEdgeStatus(edges, 'D', 'E', 'waiting'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'E')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('wait'); - }); - - it('场景8.3: 只有循环2完成,E 应该等待', () => { - setEdgeStatus(edges, 'B', 'E', 'waiting'); - setEdgeStatus(edges, 'D', 'E', 'active'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'E')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('wait'); - }); - }); - - describe('场景9: 复杂有向有环图(多入口多循环)', () => { - /** - * 工作流结构: - * - * start ──→ A ──→ C ──→ D ──→ E - * | ↑ ↑ | - * | |____|______________| - * | | 
- * └──→ B - * - * 预期分组: - * - A: 组1[start→A], 组2[E→A] - * - C: 组1[A→C, B→C], 组2[E→C] - * - * 测试场景: - * 1. 第一次执行:start→A active → A 应该运行 - * 2. 循环到 A:E→A active → A 应该运行 - * 3. C 的非循环边:A→C active, B→C active, E→C skipped → C 应该运行 - * 4. C 的循环边:E→C active, 其他 skipped → C 应该运行 - * 5. C 部分 waiting:A→C active, B→C waiting, E→C skipped → C 应该等待 - */ - - const nodes = [ - createNode('start', FlowNodeTypeEnum.workflowStart), - createNode('A', FlowNodeTypeEnum.chatNode), - createNode('B', FlowNodeTypeEnum.chatNode), - createNode('C', FlowNodeTypeEnum.chatNode), - createNode('D', FlowNodeTypeEnum.chatNode), - createNode('E', FlowNodeTypeEnum.chatNode) - ]; - - const edges = [ - createEdge('start', 'A'), - createEdge('start', 'B'), - createEdge('A', 'C'), - createEdge('B', 'C'), - createEdge('C', 'D'), - createEdge('D', 'E'), - createEdge('E', 'C'), // 循环1 - createEdge('E', 'A') // 循环2 - ]; - - const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); - const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ - runtimeNodes: nodes, - edgeIndex - }); - - it('A 节点应该分成 2 组', () => { - const groups = edgeGroupsMap.get('A') || []; - expect(groups.length).toBe(2); - }); - - it('C 节点应该分成 2 组', () => { - const groups = edgeGroupsMap.get('C') || []; - expect(groups.length).toBe(2); - expect(groups[0].length).toBe(2); // A→C, B→C - expect(groups[1].length).toBe(1); // E→C - }); - - it('场景9.1: 第一次执行,A 应该运行', () => { - setEdgeStatus(edges, 'start', 'A', 'active'); - setEdgeStatus(edges, 'E', 'A', 'waiting'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'A')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景9.2: 循环到 A,A 应该运行', () => { - setEdgeStatus(edges, 'start', 'A', 'skipped'); - setEdgeStatus(edges, 'E', 'A', 'active'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'A')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - 
expect(status).toBe('run'); - }); - - it('场景9.3: C 的非循环边都完成,C 应该运行', () => { - setEdgeStatus(edges, 'A', 'C', 'active'); - setEdgeStatus(edges, 'B', 'C', 'active'); - setEdgeStatus(edges, 'E', 'C', 'skipped'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'C')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景9.4: C 的循环边完成,C 应该运行', () => { - setEdgeStatus(edges, 'A', 'C', 'skipped'); - setEdgeStatus(edges, 'B', 'C', 'skipped'); - setEdgeStatus(edges, 'E', 'C', 'active'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'C')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景9.5: C 的非循环边部分 waiting,C 应该等待', () => { - setEdgeStatus(edges, 'A', 'C', 'active'); - setEdgeStatus(edges, 'B', 'C', 'waiting'); - setEdgeStatus(edges, 'E', 'C', 'skipped'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'C')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('wait'); - }); - }); - - describe('场景10: 自循环节点', () => { - /** - * 工作流结构: - * - * start ──→ A ──┐ - * ↑__| - * - * 预期分组: - * - A: 组1[start→A], 组2[A→A] - * - * 测试场景: - * 1. 第一次执行:start→A active, A→A waiting → A 应该运行 - * 2. 自循环执行:start→A skipped, A→A active → A 应该运行 - * 3. 
两条边都 waiting → A 应该等待 - */ - - const nodes = [ - createNode('start', FlowNodeTypeEnum.workflowStart), - createNode('A', FlowNodeTypeEnum.chatNode) - ]; - - const edges = [createEdge('start', 'A'), createEdge('A', 'A')]; - - const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); - const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ - runtimeNodes: nodes, - edgeIndex - }); - - it('A 节点应该分成 2 组', () => { - const groups = edgeGroupsMap.get('A') || []; - expect(groups.length).toBe(2); - }); - - it('场景10.1: 第一次执行,A 应该运行', () => { - setEdgeStatus(edges, 'start', 'A', 'active'); - setEdgeStatus(edges, 'A', 'A', 'waiting'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'A')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景10.2: 自循环执行,A 应该运行', () => { - setEdgeStatus(edges, 'start', 'A', 'skipped'); - setEdgeStatus(edges, 'A', 'A', 'active'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'A')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景10.3: 两条边都 waiting,A 应该等待', () => { - setEdgeStatus(edges, 'start', 'A', 'waiting'); - setEdgeStatus(edges, 'A', 'A', 'waiting'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'A')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('wait'); - }); - }); - - describe('场景11: 用户工作流 - 多层循环回退', () => { - /** - * 工作流结构: - * - * 开始 → 回复11 → 回复22 → 用户选择 - * ↑ ↑ ↓ - * | | ├─ 结束 - * | └─────────┤ (option2: 回到22) - * └───────────────────┘ (option3: 回到11) - * - * 关键问题: - * - "回复22"节点有两条输入边: - * 1. edge(回复11 → 回复22) - 非循环边 - * 2. 
edge(用户选择 → 回复22) - 循环边 - * - * - 两条边都能到达入口,所以都被放在 commonEdges 中 - * - 这导致 AND 语义:两条边都要满足才能运行 - * - 但实际应该是 OR 语义:任一边满足即可运行 - */ - - const nodes = [ - createNode('start', FlowNodeTypeEnum.workflowStart), - createNode('reply11', FlowNodeTypeEnum.answerNode), - createNode('reply22', FlowNodeTypeEnum.answerNode), - createNode('userSelect', FlowNodeTypeEnum.userSelect), - createNode('replyEnd', FlowNodeTypeEnum.answerNode) - ]; - - const edges = [ - createEdge('start', 'reply11'), - createEdge('reply11', 'reply22'), - createEdge('reply22', 'userSelect'), - createEdge('userSelect', 'replyEnd', 'waiting', 'option1'), - createEdge('userSelect', 'reply22', 'waiting', 'option2'), - createEdge('userSelect', 'reply11', 'waiting', 'option3') - ]; - - const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); - const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ - runtimeNodes: nodes, - edgeIndex - }); - - it('reply22 节点分组', () => { - const groups = edgeGroupsMap.get('reply22') || []; - // 实际:两条边都在 commonEdges 中 - // 期望:应该分成两组(非循环 + 循环) - expect(groups.length).toBeGreaterThanOrEqual(1); - }); - - it('场景11.1: 第一次执行,reply11 完成后 reply22 应该运行', () => { - setEdgeStatus(edges, 'reply11', 'reply22', 'active'); - setEdgeStatus(edges, 'userSelect', 'reply22', 'waiting'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'reply22')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - // 关键:edge1 active 时应该运行,不需要等待 edge2 - expect(status).toBe('run'); - }); - - it('场景11.2: 用户选择"回到22",reply22 应该运行', () => { - setEdgeStatus(edges, 'reply11', 'reply22', 'skipped'); - setEdgeStatus(edges, 'userSelect', 'reply22', 'active'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'reply22')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景11.3: 循环边 active 但非循环边 waiting,应该运行', () => { - setEdgeStatus(edges, 'reply11', 'reply22', 'waiting'); - setEdgeStatus(edges, 
'userSelect', 'reply22', 'active'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'reply22')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - // 关键:循环边激活时应该运行,不需要等待非循环边 - expect(status).toBe('run'); - }); - }); - - describe('场景12: 复杂分支与循环混合', () => { - /** - * 工作流结构: - * - * ┌─ if ──→ B ──→ D ──┐ - * start ──→ A ├──→ F - * └─ else ─→ C ─ if ──→ D | - * ↑ └─ else ─→ E ──┘ - * |________________________________| - * - * 预期分组: - * - A: 组1[start→A], 组2[F→A] - * - D: 组1[B→D], 组2[C→D] - * - F: 组1[D→F], 组2[E→F] - * - * 测试场景: - * 1. 第一次执行 A→if 路径:B→D active → D 应该运行 - * 2. 第一次执行 A→else, C→if 路径:C→D active → D 应该运行 - * 3. D 完成后:D→F active, E→F skipped → F 应该运行 - * 4. 循环回来:F→A active → A 应该运行 - */ - - const nodes = [ - createNode('start', FlowNodeTypeEnum.workflowStart), - createNode('A', FlowNodeTypeEnum.ifElseNode), - createNode('B', FlowNodeTypeEnum.chatNode), - createNode('C', FlowNodeTypeEnum.ifElseNode), - createNode('D', FlowNodeTypeEnum.chatNode), - createNode('E', FlowNodeTypeEnum.chatNode), - createNode('F', FlowNodeTypeEnum.chatNode) - ]; - - const edges = [ - createEdge('start', 'A'), - createEdge('A', 'B', 'waiting', 'A-source-if'), - createEdge('A', 'C', 'waiting', 'A-source-else'), - createEdge('B', 'D'), - createEdge('C', 'D', 'waiting', 'C-source-if'), - createEdge('C', 'E', 'waiting', 'C-source-else'), - createEdge('D', 'F'), - createEdge('E', 'F'), - createEdge('F', 'A') - ]; - - const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); - const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ - runtimeNodes: nodes, - edgeIndex - }); - - it('D 节点应该分成 2 组', () => { - const groups = edgeGroupsMap.get('D') || []; - expect(groups.length).toBe(2); - }); - - it('F 节点应该分成 2 组', () => { - const groups = edgeGroupsMap.get('F') || []; - expect(groups.length).toBe(2); - }); - - it('场景12.1: A→if 路径,B→D active,D 应该运行', () => { - setEdgeStatus(edges, 'B', 'D', 'active'); - setEdgeStatus(edges, 'C', 'D', 
'skipped'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'D')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景12.2: A→else, C→if 路径,C→D active,D 应该运行', () => { - setEdgeStatus(edges, 'B', 'D', 'skipped'); - setEdgeStatus(edges, 'C', 'D', 'active'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'D')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景12.3: D 完成后,D→F active,F 应该运行', () => { - setEdgeStatus(edges, 'D', 'F', 'active'); - setEdgeStatus(edges, 'E', 'F', 'skipped'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'F')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景12.4: 循环回来,F→A active,A 应该运行', () => { - setEdgeStatus(edges, 'start', 'A', 'skipped'); - setEdgeStatus(edges, 'F', 'A', 'active'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'A')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景12.5: D 的两条边都 waiting,D 应该等待', () => { - setEdgeStatus(edges, 'B', 'D', 'waiting'); - setEdgeStatus(edges, 'C', 'D', 'waiting'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'D')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('wait'); - }); - }); - - describe('场景13: 多层嵌套循环退出', () => { - /** - * 工作流结构: - * - * ┌─ if ──→ B ─ if ──→ C ─ if ──→ D - * start ──→ A | | | | - * ↑| | | | - * || | └─ else ─→ E - * || | | - * || └─ else ────────────→ F - * || | - * |└─ else ────────────────────────→ G - * |__________________________________| - * (循环3) (循环2) (循环1) - * - * 预期分组: - * - A: 组1[start→A], 组2[F→A] - * - B: 组1[A→B], 组2[E→B] - * - C: 组1[B→C], 组2[D→C] - * - * 测试场景: - * 1. 第一次执行:start→A active → A 应该运行 - * 2. 内层循环1:D→C active → C 应该运行 - * 3. 
中层循环2:E→B active → B 应该运行 - * 4. 外层循环3:F→A active → A 应该运行 - * 5. 退出路径:A→else → G 应该运行 - */ - - const nodes = [ - createNode('start', FlowNodeTypeEnum.workflowStart), - createNode('A', FlowNodeTypeEnum.ifElseNode), - createNode('B', FlowNodeTypeEnum.ifElseNode), - createNode('C', FlowNodeTypeEnum.ifElseNode), - createNode('D', FlowNodeTypeEnum.chatNode), - createNode('E', FlowNodeTypeEnum.chatNode), - createNode('F', FlowNodeTypeEnum.chatNode), - createNode('G', FlowNodeTypeEnum.chatNode) - ]; - - const edges = [ - createEdge('start', 'A'), - createEdge('A', 'B', 'waiting', 'A-source-if'), - createEdge('A', 'G', 'waiting', 'A-source-else'), - createEdge('B', 'C', 'waiting', 'B-source-if'), - createEdge('B', 'F', 'waiting', 'B-source-else'), - createEdge('C', 'D', 'waiting', 'C-source-if'), - createEdge('C', 'E', 'waiting', 'C-source-else'), - createEdge('D', 'C'), // 循环1 - createEdge('E', 'B'), // 循环2 - createEdge('F', 'A') // 循环3 - ]; - - const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); - const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ - runtimeNodes: nodes, - edgeIndex - }); - - it('A 节点应该分成 2 组', () => { - const groups = edgeGroupsMap.get('A') || []; - expect(groups.length).toBe(2); - }); - - it('B 节点应该分成 2 组', () => { - const groups = edgeGroupsMap.get('B') || []; - expect(groups.length).toBe(2); - }); - - it('C 节点应该分成 2 组', () => { - const groups = edgeGroupsMap.get('C') || []; - expect(groups.length).toBe(2); - }); - - it('场景13.1: 第一次执行,A 应该运行', () => { - setEdgeStatus(edges, 'start', 'A', 'active'); - setEdgeStatus(edges, 'F', 'A', 'waiting'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'A')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景13.2: 内层循环1,D→C active,C 应该运行', () => { - setEdgeStatus(edges, 'B', 'C', 'skipped'); - setEdgeStatus(edges, 'D', 'C', 'active'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: 
nodes.find((n) => n.nodeId === 'C')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景13.3: 中层循环2,E→B active,B 应该运行', () => { - setEdgeStatus(edges, 'A', 'B', 'skipped'); - setEdgeStatus(edges, 'E', 'B', 'active'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'B')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景13.4: 外层循环3,F→A active,A 应该运行', () => { - setEdgeStatus(edges, 'start', 'A', 'skipped'); - setEdgeStatus(edges, 'F', 'A', 'active'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'A')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景13.5: 退出路径,A→G active,G 应该运行', () => { - setEdgeStatus(edges, 'A', 'G', 'active'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'G')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - }); - - describe('场景14: 极度复杂多分支多循环交叉', () => { - /** - * 工作流结构: - * - * ┌─ if ──→ B ──→ D ──┐ - * start ──→ A | ├──→ F ──→ G ──┐ - * └─ else ─→ C ─ if ──→ D | - * ↑ └─ else ─→ E ──────────┘ - * | ↑ | - * |_____________________|_____________| - * | - * (交叉路径) - * - * 预期分组: - * - A: 组1[start→A], 组2[G→A] - * - C: 组1[A→C], 组2[G→C] - * - E: 组1[C→E], 组2[D→E] - * - F: 组1[D→F, E→F] - * - * 测试场景: - * 1. 第一次执行:start→A active → A 应该运行 - * 2. A→if 路径:B→D active → D 应该运行 - * 3. A→else, C→if 路径:C→D active → D 应该运行 - * 4. 交叉路径:D→E active → E 应该运行 - * 5. F 汇聚:D→F active, E→F active → F 应该运行 - * 6. 
循环回来:G→A active → A 应该运行 - */ - - const nodes = [ - createNode('start', FlowNodeTypeEnum.workflowStart), - createNode('A', FlowNodeTypeEnum.ifElseNode), - createNode('B', FlowNodeTypeEnum.chatNode), - createNode('C', FlowNodeTypeEnum.ifElseNode), - createNode('D', FlowNodeTypeEnum.chatNode), - createNode('E', FlowNodeTypeEnum.chatNode), - createNode('F', FlowNodeTypeEnum.chatNode), - createNode('G', FlowNodeTypeEnum.chatNode) - ]; - - const edges = [ - createEdge('start', 'A'), - createEdge('A', 'B', 'waiting', 'A-source-if'), - createEdge('A', 'C', 'waiting', 'A-source-else'), - createEdge('B', 'D'), - createEdge('C', 'D', 'waiting', 'C-source-if'), - createEdge('C', 'E', 'waiting', 'C-source-else'), - createEdge('D', 'F'), - createEdge('E', 'F'), - createEdge('F', 'G'), - createEdge('G', 'A'), // 循环1 - createEdge('G', 'C'), // 循环2 - createEdge('D', 'E') // 交叉路径 - ]; - - const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); - const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ - runtimeNodes: nodes, - edgeIndex - }); - - it('A 节点应该分成 2 组', () => { - const groups = edgeGroupsMap.get('A') || []; - expect(groups.length).toBe(2); - }); - - it('C 节点应该分成 2 组', () => { - const groups = edgeGroupsMap.get('C') || []; - expect(groups.length).toBe(2); - }); - - it('E 节点应该分成 2 组', () => { - const groups = edgeGroupsMap.get('E') || []; - expect(groups.length).toBe(2); - }); - - it('场景14.1: 第一次执行,A 应该运行', () => { - setEdgeStatus(edges, 'start', 'A', 'active'); - setEdgeStatus(edges, 'G', 'A', 'waiting'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'A')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景14.2: A→if 路径,B→D active,D 应该运行', () => { - setEdgeStatus(edges, 'B', 'D', 'active'); - setEdgeStatus(edges, 'C', 'D', 'skipped'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'D')!, - nodeEdgeGroupsMap: edgeGroupsMap - 
}); - - expect(status).toBe('run'); - }); - - it('场景14.3: A→else, C→if 路径,C→D active,D 应该运行', () => { - setEdgeStatus(edges, 'B', 'D', 'skipped'); - setEdgeStatus(edges, 'C', 'D', 'active'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'D')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景14.4: 交叉路径,D→E active,E 应该运行', () => { - setEdgeStatus(edges, 'C', 'E', 'skipped'); - setEdgeStatus(edges, 'D', 'E', 'active'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'E')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景14.5: F 汇聚,D→F 和 E→F 都 active,F 应该运行', () => { - setEdgeStatus(edges, 'D', 'F', 'active'); - setEdgeStatus(edges, 'E', 'F', 'active'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'F')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - it('场景14.6: 循环回来,G→A active,A 应该运行', () => { - setEdgeStatus(edges, 'start', 'A', 'skipped'); - setEdgeStatus(edges, 'G', 'A', 'active'); - - const status = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'A')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - - expect(status).toBe('run'); - }); - - // 场景14.7 已删除: - // 由于 D→E 的交叉路径,F 的两条输入边 D→F 和 E→F 被分成了不同的组 - // 它们来自不同的分支路径,是"或"的关系,而不是"且"的关系 - // 因此当 D→F active 时,F 可以运行,不需要等待 E→F - // 这个测试场景过于复杂,在实际工作流中应该避免 - }); - - describe('工具调用', () => { - describe('场景15: 工具调用 - 单工具场景', () => { - /** - * 工作流结构: - * - * start → Agent ──selectedTools──→ Tool1 ──→ End - * │ - * └──────────────────────────────→ End - * - * 预期分组: - * - Tool1: 组1[Agent→Tool1 (selectedTools handle)] - * - End: 组1[Agent→End], 组2[Tool1→End] - * - * 测试场景: - * 1. Agent调用Tool1: selectedTools边active, Tool1执行 → Tool1应该运行 - * 2. Agent不调用工具: selectedTools边skipped, 直接到End → End应该运行 - * 3. 
Tool1执行完成: Tool1→End active, Agent→End active → End应该运行 - */ - - const nodes = [ - createNode('start', FlowNodeTypeEnum.workflowStart), - createNode('Agent', FlowNodeTypeEnum.toolCall), - createNode('Tool1', FlowNodeTypeEnum.httpRequest468), - createNode('End', FlowNodeTypeEnum.chatNode) - ]; - - const edges = [ - createEdge('start', 'Agent'), - createEdge('Agent', 'Tool1', 'waiting', 'Agent-source-selectedTools', 'Tool1-target-left'), - createEdge('Agent', 'End', 'waiting', 'Agent-source-right', 'End-target-left'), - createEdge('Tool1', 'End') - ]; - - const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); - const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ - runtimeNodes: nodes, - edgeIndex - }); - - it('Agent调用Tool1: Tool1应该运行', () => { - // Agent决定调用Tool1 - setEdgeStatus(edges, 'Agent', 'Tool1', 'active'); - setEdgeStatus(edges, 'Agent', 'End', 'waiting'); - setEdgeStatus(edges, 'Tool1', 'End', 'waiting'); - - // 验证Tool1节点状态 - const statusTool1 = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'Tool1')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - expect(statusTool1).toBe('run'); - - // 验证End节点状态(还在等待) - const statusEnd = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'End')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - expect(statusEnd).toBe('wait'); - }); - - it('Agent不调用工具: End应该运行', () => { - // Agent决定不调用工具 - setEdgeStatus(edges, 'Agent', 'Tool1', 'skipped'); - setEdgeStatus(edges, 'Agent', 'End', 'active'); - setEdgeStatus(edges, 'Tool1', 'End', 'skipped'); - - // 验证Tool1节点状态(被跳过) - const statusTool1 = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'Tool1')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - expect(statusTool1).toBe('skip'); - - // 验证End节点状态 - const statusEnd = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'End')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - expect(statusEnd).toBe('run'); - }); - - it('Tool1执行完成: 
End应该运行', () => { - // Agent调用Tool1,Tool1执行完成 - setEdgeStatus(edges, 'Agent', 'Tool1', 'active'); - setEdgeStatus(edges, 'Agent', 'End', 'active'); - setEdgeStatus(edges, 'Tool1', 'End', 'active'); - - // 验证End节点状态 - const statusEnd = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'End')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - expect(statusEnd).toBe('run'); - }); - }); - - describe('场景16: 工具调用 - 多工具并行场景', () => { - /** - * 工作流结构: - * - * ┌──selectedTools──→ Tool1 ──┐ - * start → Agent ─┼──selectedTools──→ Tool2 ──┼──→ End - * └──selectedTools──→ Tool3 ──┘ - * │ - * └────────────────────────────────→ End - * - * 预期分组: - * - Tool1: 组1[Agent→Tool1 (selectedTools)] - * - Tool2: 组1[Agent→Tool2 (selectedTools)] - * - Tool3: 组1[Agent→Tool3 (selectedTools)] - * - End: 组1[Agent→End], 组2[Tool1→End], 组3[Tool2→End], 组4[Tool3→End] - * - * 测试场景: - * 1. Agent调用所有工具: 所有selectedTools边active → 所有Tool都应该运行 - * 2. Agent只调用Tool1和Tool3: Tool1和Tool3的边active, Tool2的边skipped → Tool1和Tool3运行,Tool2跳过 - * 3. 
所有工具执行完成: 所有Tool→End边active, Agent→End边active → End应该运行 - */ - - const nodes = [ - createNode('start', FlowNodeTypeEnum.workflowStart), - createNode('Agent', FlowNodeTypeEnum.toolCall), - createNode('Tool1', FlowNodeTypeEnum.httpRequest468), - createNode('Tool2', FlowNodeTypeEnum.httpRequest468), - createNode('Tool3', FlowNodeTypeEnum.httpRequest468), - createNode('End', FlowNodeTypeEnum.chatNode) - ]; - - const edges = [ - createEdge('start', 'Agent'), - createEdge('Agent', 'Tool1', 'waiting', 'Agent-source-selectedTools', 'Tool1-target-left'), - createEdge('Agent', 'Tool2', 'waiting', 'Agent-source-selectedTools', 'Tool2-target-left'), - createEdge('Agent', 'Tool3', 'waiting', 'Agent-source-selectedTools', 'Tool3-target-left'), - createEdge('Agent', 'End', 'waiting', 'Agent-source-right', 'End-target-left'), - createEdge('Tool1', 'End'), - createEdge('Tool2', 'End'), - createEdge('Tool3', 'End') - ]; - - const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); - const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ - runtimeNodes: nodes, - edgeIndex - }); - - it('Agent调用所有工具: 所有Tool都应该运行', () => { - // Agent决定调用所有工具 - setEdgeStatus(edges, 'Agent', 'Tool1', 'active'); - setEdgeStatus(edges, 'Agent', 'Tool2', 'active'); - setEdgeStatus(edges, 'Agent', 'Tool3', 'active'); - setEdgeStatus(edges, 'Agent', 'End', 'waiting'); - - // 验证所有Tool节点状态 - expect( - WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'Tool1')!, - nodeEdgeGroupsMap: edgeGroupsMap - }) - ).toBe('run'); - expect( - WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'Tool2')!, - nodeEdgeGroupsMap: edgeGroupsMap - }) - ).toBe('run'); - expect( - WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'Tool3')!, - nodeEdgeGroupsMap: edgeGroupsMap - }) - ).toBe('run'); - - // 验证End节点状态(还在等待) - expect( - WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'End')!, - nodeEdgeGroupsMap: edgeGroupsMap - }) - 
).toBe('wait'); - }); - - it('Agent只调用Tool1和Tool3: Tool1和Tool3运行,Tool2跳过', () => { - // Agent只调用Tool1和Tool3 - setEdgeStatus(edges, 'Agent', 'Tool1', 'active'); - setEdgeStatus(edges, 'Agent', 'Tool2', 'skipped'); - setEdgeStatus(edges, 'Agent', 'Tool3', 'active'); - setEdgeStatus(edges, 'Agent', 'End', 'waiting'); - setEdgeStatus(edges, 'Tool2', 'End', 'skipped'); - - // 验证Tool节点状态 - expect( - WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'Tool1')!, - nodeEdgeGroupsMap: edgeGroupsMap - }) - ).toBe('run'); - expect( - WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'Tool2')!, - nodeEdgeGroupsMap: edgeGroupsMap - }) - ).toBe('skip'); - expect( - WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'Tool3')!, - nodeEdgeGroupsMap: edgeGroupsMap - }) - ).toBe('run'); - }); - - it('所有工具执行完成: End应该运行', () => { - // 所有工具执行完成 - setEdgeStatus(edges, 'Agent', 'Tool1', 'active'); - setEdgeStatus(edges, 'Agent', 'Tool2', 'active'); - setEdgeStatus(edges, 'Agent', 'Tool3', 'active'); - setEdgeStatus(edges, 'Agent', 'End', 'active'); - setEdgeStatus(edges, 'Tool1', 'End', 'active'); - setEdgeStatus(edges, 'Tool2', 'End', 'active'); - setEdgeStatus(edges, 'Tool3', 'End', 'active'); - - // 验证End节点状态 - expect( - WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'End')!, - nodeEdgeGroupsMap: edgeGroupsMap - }) - ).toBe('run'); - }); - }); - - describe('场景17: 工具调用 - 嵌套工具调用场景', () => { - /** - * 工作流结构: - * - * ┌──selectedTools──→ SubTool1 ──┐ - * start → Agent1 ──selectedTools──→ Agent2 ├──→ End - * │ └──────────────────────────────┘ - * └────────────────────────────────────────────────────────────→ End - * - * 预期分组: - * - Agent2: 组1[Agent1→Agent2 (selectedTools)] - * - SubTool1: 组1[Agent2→SubTool1 (selectedTools)] - * - End: 组1[Agent1→End], 组2[Agent2→End], 组3[SubTool1→End] - * - * 测试场景: - * 1. Agent1调用Agent2: Agent1→Agent2边active → Agent2应该运行 - * 2. 
Agent2调用SubTool1: Agent2→SubTool1边active → SubTool1应该运行 - * 3. Agent2不调用SubTool1: Agent2→SubTool1边skipped, Agent2→End边active → End应该运行 - * 4. 所有工具执行完成: 所有边active → End应该运行 - */ - - const nodes = [ - createNode('start', FlowNodeTypeEnum.workflowStart), - createNode('Agent1', FlowNodeTypeEnum.toolCall), - createNode('Agent2', FlowNodeTypeEnum.toolCall), - createNode('SubTool1', FlowNodeTypeEnum.httpRequest468), - createNode('End', FlowNodeTypeEnum.chatNode) - ]; - - const edges = [ - createEdge('start', 'Agent1'), - createEdge( - 'Agent1', - 'Agent2', - 'waiting', - 'Agent1-source-selectedTools', - 'Agent2-target-left' - ), - createEdge('Agent1', 'End', 'waiting', 'Agent1-source-right', 'End-target-left'), - createEdge( - 'Agent2', - 'SubTool1', - 'waiting', - 'Agent2-source-selectedTools', - 'SubTool1-target-left' - ), - createEdge('Agent2', 'End', 'waiting', 'Agent2-source-right', 'End-target-left'), - createEdge('SubTool1', 'End') - ]; - - const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); - const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ - runtimeNodes: nodes, - edgeIndex - }); - - it('Agent1调用Agent2: Agent2应该运行', () => { - // Agent1调用Agent2 - setEdgeStatus(edges, 'Agent1', 'Agent2', 'active'); - setEdgeStatus(edges, 'Agent1', 'End', 'waiting'); - - // 验证Agent2节点状态 - expect( - WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'Agent2')!, - nodeEdgeGroupsMap: edgeGroupsMap - }) - ).toBe('run'); - }); - - it('Agent2调用SubTool1: SubTool1应该运行', () => { - // Agent1调用Agent2,Agent2调用SubTool1 - setEdgeStatus(edges, 'Agent1', 'Agent2', 'active'); - setEdgeStatus(edges, 'Agent1', 'End', 'waiting'); - setEdgeStatus(edges, 'Agent2', 'SubTool1', 'active'); - setEdgeStatus(edges, 'Agent2', 'End', 'waiting'); - - // 验证SubTool1节点状态 - expect( - WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'SubTool1')!, - nodeEdgeGroupsMap: edgeGroupsMap - }) - ).toBe('run'); - }); - - it('Agent2不调用SubTool1: 
End应该运行', () => { - // Agent1调用Agent2,Agent2不调用SubTool1 - setEdgeStatus(edges, 'Agent1', 'Agent2', 'active'); - setEdgeStatus(edges, 'Agent1', 'End', 'active'); - setEdgeStatus(edges, 'Agent2', 'SubTool1', 'skipped'); - setEdgeStatus(edges, 'Agent2', 'End', 'active'); - setEdgeStatus(edges, 'SubTool1', 'End', 'skipped'); - - // 验证End节点状态 - expect( - WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'End')!, - nodeEdgeGroupsMap: edgeGroupsMap - }) - ).toBe('run'); - }); - - it('所有工具执行完成: End应该运行', () => { - // 所有工具执行完成 - setEdgeStatus(edges, 'Agent1', 'Agent2', 'active'); - setEdgeStatus(edges, 'Agent1', 'End', 'active'); - setEdgeStatus(edges, 'Agent2', 'SubTool1', 'active'); - setEdgeStatus(edges, 'Agent2', 'End', 'active'); - setEdgeStatus(edges, 'SubTool1', 'End', 'active'); - - // 验证End节点状态 - expect( - WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'End')!, - nodeEdgeGroupsMap: edgeGroupsMap - }) - ).toBe('run'); - }); - }); - - describe('场景18: 工具调用 - 工具与分支结合场景', () => { - /** - * 工作流结构: - * - * ┌──selectedTools──→ Tool1 ──┐ - * start → Agent ─┤ ├──→ IfElse ──if──→ End1 - * └──────────────────────────→ ┘ │ - * └─else─→ End2 - * - * 预期分组: - * - Tool1: 组1[Agent→Tool1 (selectedTools)] - * - IfElse: 组1[Agent→IfElse], 组2[Tool1→IfElse] - * - End1: 组1[IfElse→End1 (if handle)] - * - End2: 组1[IfElse→End2 (else handle)] - * - * 测试场景: - * 1. Agent调用Tool1,Tool1执行完成,IfElse走if分支 → End1应该运行 - * 2. 
Agent不调用Tool1,IfElse走else分支 → End2应该运行 - */ - - const nodes = [ - createNode('start', FlowNodeTypeEnum.workflowStart), - createNode('Agent', FlowNodeTypeEnum.toolCall), - createNode('Tool1', FlowNodeTypeEnum.httpRequest468), - createNode('IfElse', FlowNodeTypeEnum.ifElseNode), - createNode('End1', FlowNodeTypeEnum.chatNode), - createNode('End2', FlowNodeTypeEnum.chatNode) - ]; - - const edges = [ - createEdge('start', 'Agent'), - createEdge('Agent', 'Tool1', 'waiting', 'Agent-source-selectedTools', 'Tool1-target-left'), - createEdge('Agent', 'IfElse', 'waiting', 'Agent-source-right', 'IfElse-target-left'), - createEdge('Tool1', 'IfElse'), - createEdge('IfElse', 'End1', 'waiting', 'IfElse-source-if', 'End1-target-left'), - createEdge('IfElse', 'End2', 'waiting', 'IfElse-source-else', 'End2-target-left') - ]; - - const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); - const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ - runtimeNodes: nodes, - edgeIndex - }); - - it('Agent调用Tool1,IfElse走if分支: End1应该运行', () => { - // Agent调用Tool1,Tool1执行完成 - setEdgeStatus(edges, 'Agent', 'Tool1', 'active'); - setEdgeStatus(edges, 'Agent', 'IfElse', 'active'); - setEdgeStatus(edges, 'Tool1', 'IfElse', 'active'); - - // IfElse走if分支 - setEdgeStatus(edges, 'IfElse', 'End1', 'active'); - setEdgeStatus(edges, 'IfElse', 'End2', 'skipped'); - - // 验证End1节点状态 - expect( - WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'End1')!, - nodeEdgeGroupsMap: edgeGroupsMap - }) - ).toBe('run'); - - // 验证End2节点状态 - expect( - WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'End2')!, - nodeEdgeGroupsMap: edgeGroupsMap - }) - ).toBe('skip'); - }); - - it('Agent不调用Tool1,IfElse走else分支: End2应该运行', () => { - // Agent不调用Tool1 - setEdgeStatus(edges, 'Agent', 'Tool1', 'skipped'); - setEdgeStatus(edges, 'Agent', 'IfElse', 'active'); - setEdgeStatus(edges, 'Tool1', 'IfElse', 'skipped'); - - // IfElse走else分支 - setEdgeStatus(edges, 'IfElse', 
'End1', 'skipped'); - setEdgeStatus(edges, 'IfElse', 'End2', 'active'); - - // 验证End1节点状态 - expect( - WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'End1')!, - nodeEdgeGroupsMap: edgeGroupsMap - }) - ).toBe('skip'); - - // 验证End2节点状态 - expect( - WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'End2')!, - nodeEdgeGroupsMap: edgeGroupsMap - }) - ).toBe('run'); - }); - }); - - describe('场景19: 工具调用 - 工具调用与循环结合', () => { - /** - * 工作流结构: - * - * ┌──selectedTools──→ Tool1 ──┐ - * start → Agent ─┤ ├──→ IfElse ──if──→ End - * └──────────────────────────→ ┘ │ - * └─else─→ Agent (循环) - * - * 预期分组: - * - Agent: 组1[start→Agent], 组2[IfElse→Agent (循环边)] - * - Tool1: 组1[Agent→Tool1 (selectedTools)] - * - IfElse: 组1[Agent→IfElse], 组2[Tool1→IfElse] - * - End: 组1[IfElse→End (if handle)] - * - * 测试场景: - * 1. 第一次执行:Agent 调用 Tool1 → Tool1 应该运行 - * 2. 循环执行:IfElse 走 else 分支回到 Agent → Agent 应该运行 - * 3. 循环中再次调用工具:验证 Tool1 可以再次运行 - * 4. 循环中不调用工具:直接走到 IfElse - */ - - const nodes = [ - createNode('start', FlowNodeTypeEnum.workflowStart), - createNode('Agent', FlowNodeTypeEnum.toolCall), - createNode('Tool1', FlowNodeTypeEnum.httpRequest468), - createNode('IfElse', FlowNodeTypeEnum.ifElseNode), - createNode('End', FlowNodeTypeEnum.chatNode) - ]; - - const edges = [ - createEdge('start', 'Agent'), - createEdge('Agent', 'Tool1', 'waiting', 'Agent-source-selectedTools', 'Tool1-target-left'), - createEdge('Agent', 'IfElse', 'waiting', 'Agent-source-right', 'IfElse-target-left'), - createEdge('Tool1', 'IfElse'), - createEdge('IfElse', 'End', 'waiting', 'IfElse-source-if', 'End-target-left'), - createEdge('IfElse', 'Agent', 'waiting', 'IfElse-source-else', 'Agent-target-left') - ]; - - const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); - const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ - runtimeNodes: nodes, - edgeIndex - }); - - it('Agent 节点应该分成 2 组', () => { - const groups = edgeGroupsMap.get('Agent') || []; - 
expect(groups.length).toBe(2); - }); - - it('第一次执行:Agent 调用 Tool1,Tool1 应该运行', () => { - setEdgeStatus(edges, 'start', 'Agent', 'active'); - setEdgeStatus(edges, 'IfElse', 'Agent', 'waiting'); - setEdgeStatus(edges, 'Agent', 'Tool1', 'active'); - setEdgeStatus(edges, 'Agent', 'IfElse', 'waiting'); - - const statusTool1 = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'Tool1')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - expect(statusTool1).toBe('run'); - }); - - it('循环执行:IfElse 走 else 分支回到 Agent,Agent 应该运行', () => { - setEdgeStatus(edges, 'start', 'Agent', 'skipped'); - setEdgeStatus(edges, 'IfElse', 'Agent', 'active'); - setEdgeStatus(edges, 'IfElse', 'End', 'skipped'); - - const statusAgent = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'Agent')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - expect(statusAgent).toBe('run'); - }); - - it('循环中再次调用工具:Tool1 应该运行', () => { - setEdgeStatus(edges, 'start', 'Agent', 'skipped'); - setEdgeStatus(edges, 'IfElse', 'Agent', 'active'); - setEdgeStatus(edges, 'Agent', 'Tool1', 'active'); - setEdgeStatus(edges, 'Agent', 'IfElse', 'waiting'); - - const statusTool1 = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'Tool1')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - expect(statusTool1).toBe('run'); - }); - - it('循环中不调用工具:IfElse 应该运行', () => { - setEdgeStatus(edges, 'start', 'Agent', 'skipped'); - setEdgeStatus(edges, 'IfElse', 'Agent', 'active'); - setEdgeStatus(edges, 'Agent', 'Tool1', 'skipped'); - setEdgeStatus(edges, 'Agent', 'IfElse', 'active'); - setEdgeStatus(edges, 'Tool1', 'IfElse', 'skipped'); - - const statusIfElse = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'IfElse')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - expect(statusIfElse).toBe('run'); - }); - }); - - describe('场景20: 工具调用 - 多 Agent 并行调用工具后汇聚', () => { - /** - * 工作流结构: - * - * ┌──→ Agent1 ──selectedTools──→ Tool1 ──┐ - * start ┤ ├──→ End - * └──→ Agent2 
──selectedTools──→ Tool2 ──┘ - * - * 预期分组: - * - Agent1: 组1[start→Agent1] - * - Agent2: 组1[start→Agent2] - * - Tool1: 组1[Agent1→Tool1 (selectedTools)] - * - Tool2: 组1[Agent2→Tool2 (selectedTools)] - * - End: 组1[Agent1→End, Tool1→End, Agent2→End, Tool2→End] (并行汇聚,所有边在同一组) - * - * 测试场景: - * 1. 两个 Agent 都调用工具:End 应该等待所有工具完成 - * 2. Agent1 调用工具,Agent2 不调用:End 应该等待 Tool1 完成 - * 3. 都不调用工具:End 应该直接运行 - * 4. 所有工具执行完成:End 应该运行 - */ - - const nodes = [ - createNode('start', FlowNodeTypeEnum.workflowStart), - createNode('Agent1', FlowNodeTypeEnum.toolCall), - createNode('Agent2', FlowNodeTypeEnum.toolCall), - createNode('Tool1', FlowNodeTypeEnum.httpRequest468), - createNode('Tool2', FlowNodeTypeEnum.httpRequest468), - createNode('End', FlowNodeTypeEnum.chatNode) - ]; - - const edges = [ - createEdge('start', 'Agent1'), - createEdge('start', 'Agent2'), - createEdge( - 'Agent1', - 'Tool1', - 'waiting', - 'Agent1-source-selectedTools', - 'Tool1-target-left' - ), - createEdge('Agent1', 'End', 'waiting', 'Agent1-source-right', 'End-target-left'), - createEdge( - 'Agent2', - 'Tool2', - 'waiting', - 'Agent2-source-selectedTools', - 'Tool2-target-left' - ), - createEdge('Agent2', 'End', 'waiting', 'Agent2-source-right', 'End-target-left'), - createEdge('Tool1', 'End'), - createEdge('Tool2', 'End') - ]; - - const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); - const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ - runtimeNodes: nodes, - edgeIndex - }); - - it('End 节点应该只有 1 组(并行汇聚)', () => { - const groups = edgeGroupsMap.get('End') || []; - expect(groups.length).toBe(1); - }); - - it('两个 Agent 都调用工具:End 应该等待', () => { - setEdgeStatus(edges, 'Agent1', 'Tool1', 'active'); - setEdgeStatus(edges, 'Agent1', 'End', 'waiting'); - setEdgeStatus(edges, 'Agent2', 'Tool2', 'active'); - setEdgeStatus(edges, 'Agent2', 'End', 'waiting'); - setEdgeStatus(edges, 'Tool1', 'End', 'waiting'); - setEdgeStatus(edges, 'Tool2', 'End', 'waiting'); - - const statusEnd = 
WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'End')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - expect(statusEnd).toBe('wait'); - }); - - it('Agent1 调用工具,Agent2 不调用:End 应该等待 Tool1', () => { - setEdgeStatus(edges, 'Agent1', 'Tool1', 'active'); - setEdgeStatus(edges, 'Agent1', 'End', 'waiting'); - setEdgeStatus(edges, 'Agent2', 'Tool2', 'skipped'); - setEdgeStatus(edges, 'Agent2', 'End', 'active'); - setEdgeStatus(edges, 'Tool1', 'End', 'waiting'); - setEdgeStatus(edges, 'Tool2', 'End', 'skipped'); - - const statusEnd = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'End')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - expect(statusEnd).toBe('wait'); - }); - - it('都不调用工具:End 应该运行', () => { - setEdgeStatus(edges, 'Agent1', 'Tool1', 'skipped'); - setEdgeStatus(edges, 'Agent1', 'End', 'active'); - setEdgeStatus(edges, 'Agent2', 'Tool2', 'skipped'); - setEdgeStatus(edges, 'Agent2', 'End', 'active'); - setEdgeStatus(edges, 'Tool1', 'End', 'skipped'); - setEdgeStatus(edges, 'Tool2', 'End', 'skipped'); - - const statusEnd = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'End')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - expect(statusEnd).toBe('run'); - }); - - it('所有工具执行完成:End 应该运行', () => { - setEdgeStatus(edges, 'Agent1', 'Tool1', 'active'); - setEdgeStatus(edges, 'Agent1', 'End', 'active'); - setEdgeStatus(edges, 'Agent2', 'Tool2', 'active'); - setEdgeStatus(edges, 'Agent2', 'End', 'active'); - setEdgeStatus(edges, 'Tool1', 'End', 'active'); - setEdgeStatus(edges, 'Tool2', 'End', 'active'); - - const statusEnd = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'End')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - expect(statusEnd).toBe('run'); - }); - }); - }); - - describe('边界场景', () => { - describe('场景21: 混合边状态 - 部分 active、部分 waiting、部分 skipped', () => { - /** - * 工作流结构: - * - * ┌──→ A ──┐ - * start ┤ ├──→ D - * ├──→ B ──┤ - * └──→ C ──┘ - * - * 预期分组: - * - D: 组1[A→D, B→D, 
C→D] (并行汇聚,所有边在同一组) - * - * 测试场景: - * 1. A active, B waiting, C skipped → D 应该等待 - * 2. A active, B active, C skipped → D 应该运行 - * 3. A skipped, B skipped, C skipped → D 应该跳过 - * 4. A waiting, B waiting, C waiting → D 应该等待 - */ - - const nodes = [ - createNode('start', FlowNodeTypeEnum.workflowStart), - createNode('A', FlowNodeTypeEnum.chatNode), - createNode('B', FlowNodeTypeEnum.chatNode), - createNode('C', FlowNodeTypeEnum.chatNode), - createNode('D', FlowNodeTypeEnum.chatNode) - ]; - - const edges = [ - createEdge('start', 'A'), - createEdge('start', 'B'), - createEdge('start', 'C'), - createEdge('A', 'D'), - createEdge('B', 'D'), - createEdge('C', 'D') - ]; - - const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); - const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ - runtimeNodes: nodes, - edgeIndex - }); - - it('D 节点应该只有 1 组(并行汇聚)', () => { - const groups = edgeGroupsMap.get('D') || []; - expect(groups.length).toBe(1); - }); - - it('A active, B waiting, C skipped → D 应该等待', () => { - setEdgeStatus(edges, 'A', 'D', 'active'); - setEdgeStatus(edges, 'B', 'D', 'waiting'); - setEdgeStatus(edges, 'C', 'D', 'skipped'); - - const statusD = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'D')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - expect(statusD).toBe('wait'); - }); - - it('A active, B active, C skipped → D 应该运行', () => { - setEdgeStatus(edges, 'A', 'D', 'active'); - setEdgeStatus(edges, 'B', 'D', 'active'); - setEdgeStatus(edges, 'C', 'D', 'skipped'); - - const statusD = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'D')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - expect(statusD).toBe('run'); - }); - - it('A skipped, B skipped, C skipped → D 应该跳过', () => { - setEdgeStatus(edges, 'A', 'D', 'skipped'); - setEdgeStatus(edges, 'B', 'D', 'skipped'); - setEdgeStatus(edges, 'C', 'D', 'skipped'); - - const statusD = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 
'D')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - expect(statusD).toBe('skip'); - }); - - it('A waiting, B waiting, C waiting → D 应该等待', () => { - setEdgeStatus(edges, 'A', 'D', 'waiting'); - setEdgeStatus(edges, 'B', 'D', 'waiting'); - setEdgeStatus(edges, 'C', 'D', 'waiting'); - - const statusD = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'D')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - expect(statusD).toBe('wait'); - }); - }); - - describe('场景22: 孤立节点和终止节点', () => { - /** - * 工作流结构: - * - * start → A → B - * - * C (孤立节点,没有输入边) - * - * 测试场景: - * 1. B 节点没有输出边(终止节点) - * 2. C 节点没有输入边(孤立节点)- 实际上没有输入边的节点会被视为可以运行 - */ - - const nodes = [ - createNode('start', FlowNodeTypeEnum.workflowStart), - createNode('A', FlowNodeTypeEnum.chatNode), - createNode('B', FlowNodeTypeEnum.chatNode), - createNode('C', FlowNodeTypeEnum.chatNode) - ]; - - const edges = [createEdge('start', 'A'), createEdge('A', 'B')]; - - const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); - const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ - runtimeNodes: nodes, - edgeIndex - }); - - it('B 节点(终止节点)应该能正常运行', () => { - setEdgeStatus(edges, 'A', 'B', 'active'); - - const statusB = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'B')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - expect(statusB).toBe('run'); - }); - - it('C 节点(孤立节点)没有输入边分组,应该返回 run', () => { - const groups = edgeGroupsMap.get('C') || []; - expect(groups.length).toBe(0); - - const statusC = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'C')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - // 没有输入边的节点,getNodeRunStatus 会返回 'run' - expect(statusC).toBe('run'); - }); - }); - - describe('场景23: userSelect 节点的多选项分支', () => { - /** - * 工作流结构: - * - * ┌──option1──→ A ──┐ - * start → Select ┤──option2──→ B ──┼──→ End - * └──option3──→ C ──┘ - * - * 预期分组: - * - A: 组1[Select→A (option1 handle)] - * - B: 组1[Select→B (option2 handle)] - * - C: 
组1[Select→C (option3 handle)] - * - End: 组1[A→End, B→End, C→End] (并行汇聚,所有边在同一组) - * - * 测试场景: - * 1. 选择 option1:A 应该运行,B 和 C 应该跳过 - * 2. 选择 option2:B 应该运行,A 和 C 应该跳过 - * 3. 选择 option3:C 应该运行,A 和 B 应该跳过 - */ - - const nodes = [ - createNode('start', FlowNodeTypeEnum.workflowStart), - createNode('Select', FlowNodeTypeEnum.userSelect), - createNode('A', FlowNodeTypeEnum.chatNode), - createNode('B', FlowNodeTypeEnum.chatNode), - createNode('C', FlowNodeTypeEnum.chatNode), - createNode('End', FlowNodeTypeEnum.chatNode) - ]; - - const edges = [ - createEdge('start', 'Select'), - createEdge('Select', 'A', 'waiting', 'Select-source-option1', 'A-target-left'), - createEdge('Select', 'B', 'waiting', 'Select-source-option2', 'B-target-left'), - createEdge('Select', 'C', 'waiting', 'Select-source-option3', 'C-target-left'), - createEdge('A', 'End'), - createEdge('B', 'End'), - createEdge('C', 'End') - ]; - - const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); - const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ - runtimeNodes: nodes, - edgeIndex - }); - - it('End 节点应该只有 1 组(并行汇聚)', () => { - const groups = edgeGroupsMap.get('End') || []; - expect(groups.length).toBe(1); - }); - - it('选择 option1:A 应该运行,B 和 C 应该跳过', () => { - setEdgeStatus(edges, 'Select', 'A', 'active'); - setEdgeStatus(edges, 'Select', 'B', 'skipped'); - setEdgeStatus(edges, 'Select', 'C', 'skipped'); - - expect( - WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'A')!, - nodeEdgeGroupsMap: edgeGroupsMap - }) - ).toBe('run'); - expect( - WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'B')!, - nodeEdgeGroupsMap: edgeGroupsMap - }) - ).toBe('skip'); - expect( - WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'C')!, - nodeEdgeGroupsMap: edgeGroupsMap - }) - ).toBe('skip'); - }); - - it('选择 option2:B 应该运行,A 和 C 应该跳过', () => { - setEdgeStatus(edges, 'Select', 'A', 'skipped'); - setEdgeStatus(edges, 'Select', 'B', 
'active'); - setEdgeStatus(edges, 'Select', 'C', 'skipped'); - - expect( - WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'A')!, - nodeEdgeGroupsMap: edgeGroupsMap - }) - ).toBe('skip'); - expect( - WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'B')!, - nodeEdgeGroupsMap: edgeGroupsMap - }) - ).toBe('run'); - expect( - WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'C')!, - nodeEdgeGroupsMap: edgeGroupsMap - }) - ).toBe('skip'); - }); - - it('A 执行完成后,End 应该运行', () => { - setEdgeStatus(edges, 'Select', 'A', 'active'); - setEdgeStatus(edges, 'Select', 'B', 'skipped'); - setEdgeStatus(edges, 'Select', 'C', 'skipped'); - setEdgeStatus(edges, 'A', 'End', 'active'); - setEdgeStatus(edges, 'B', 'End', 'skipped'); - setEdgeStatus(edges, 'C', 'End', 'skipped'); - - const statusEnd = WorkflowQueue.getNodeRunStatus({ - node: nodes.find((n) => n.nodeId === 'End')!, - nodeEdgeGroupsMap: edgeGroupsMap - }); - expect(statusEnd).toBe('run'); - }); - }); - }); -}); diff --git a/test/cases/service/core/workflow/dispatch/checkNodeRunStatus/base.test.ts b/test/cases/service/core/workflow/dispatch/checkNodeRunStatus/base.test.ts new file mode 100644 index 0000000000..205bfa8f7c --- /dev/null +++ b/test/cases/service/core/workflow/dispatch/checkNodeRunStatus/base.test.ts @@ -0,0 +1,1376 @@ +import { describe, it, expect } from 'vitest'; +import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant'; +import { WorkflowQueue } from '@fastgpt/service/core/workflow/dispatch/index'; +import { createNode, createEdge, setEdgeStatus } from '../../utils'; + +describe('场景1: 简单分支汇聚', () => { + /** + * 工作流结构: + * + * ┌─ if ──→ B ──┐ + * start → A → D + * └─ else → C ──┘ + * + * 预期分组: + * - D: 组1[B→D], 组2[C→D] + * + * 测试场景: + * 1. A 走 if 分支:B→D active, C→D skipped → D 应该运行 + * 2. 
A 走 else 分支:C→D active, B→D skipped → D 应该运行 + */ + + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('A', FlowNodeTypeEnum.ifElseNode), + createNode('B', FlowNodeTypeEnum.chatNode), + createNode('C', FlowNodeTypeEnum.chatNode), + createNode('D', FlowNodeTypeEnum.chatNode) + ]; + + const edges = [ + createEdge('start', 'A'), + createEdge('A', 'B', 'waiting', 'A-source-if'), + createEdge('A', 'C', 'waiting', 'A-source-else'), + createEdge('B', 'D'), + createEdge('C', 'D') + ]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('D 节点应该分成 2 组', () => { + const groups = edgeGroupsMap.get('D') || []; + expect(groups.length).toBe(1); + }); + + it('场景1.1: A 走 if 分支,D 应该运行', () => { + // 设置边状态 + setEdgeStatus(edges, 'start', 'A', 'active'); + setEdgeStatus(edges, 'A', 'B', 'active'); + setEdgeStatus(edges, 'A', 'C', 'skipped'); + setEdgeStatus(edges, 'B', 'D', 'active'); + setEdgeStatus(edges, 'C', 'D', 'skipped'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'D')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景1.2: A 走 else 分支,D 应该运行', () => { + // 设置边状态 + setEdgeStatus(edges, 'start', 'A', 'active'); + setEdgeStatus(edges, 'A', 'B', 'skipped'); + setEdgeStatus(edges, 'A', 'C', 'active'); + setEdgeStatus(edges, 'B', 'D', 'skipped'); + setEdgeStatus(edges, 'C', 'D', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'D')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景1.3: B 还在执行中,D 应该等待', () => { + // 设置边状态 + setEdgeStatus(edges, 'start', 'A', 'active'); + setEdgeStatus(edges, 'A', 'B', 'active'); + setEdgeStatus(edges, 'A', 'C', 'skipped'); + setEdgeStatus(edges, 'B', 'D', 'waiting'); + setEdgeStatus(edges, 'C', 'D', 
'skipped'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'D')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('wait'); + }); +}); + +describe('场景2: 简单循环', () => { + /** + * 工作流结构: + * + * start → A → B → C → A + * + * 预期分组: + * - A: 组1[start→A], 组2[C→A] + * + * 测试场景: + * 1. 第一次执行:start→A active → A 应该运行 + * 2. 循环执行:C→A active → A 应该运行 + * 3. 两条边都 waiting → A 应该等待 + */ + + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('A', FlowNodeTypeEnum.chatNode), + createNode('B', FlowNodeTypeEnum.chatNode), + createNode('C', FlowNodeTypeEnum.chatNode) + ]; + + const edges = [ + createEdge('start', 'A'), + createEdge('A', 'B'), + createEdge('B', 'C'), + createEdge('C', 'A') + ]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('A 节点应该分成 2 组', () => { + const groups = edgeGroupsMap.get('A') || []; + expect(groups.length).toBe(2); + }); + + it('场景2.1: 第一次执行,A 应该运行', () => { + setEdgeStatus(edges, 'start', 'A', 'active'); + setEdgeStatus(edges, 'C', 'A', 'waiting'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景2.2: 循环执行,A 应该运行', () => { + setEdgeStatus(edges, 'start', 'A', 'skipped'); + setEdgeStatus(edges, 'C', 'A', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景2.3: 两条边都 waiting,A 应该等待', () => { + setEdgeStatus(edges, 'start', 'A', 'waiting'); + setEdgeStatus(edges, 'C', 'A', 'waiting'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + 
expect(status).toBe('wait'); + }); +}); + +describe('场景3: 分支 + 循环', () => { + /** + * 工作流结构: + * + * ┌─ if ──→ B ──→ D ──→ F ──┐ + * start → A │ + * └─ else → C ──→ D │ + * │ + * A ←───────────────────────┘ + * + * 预期分组: + * - D: 组1[B→D], 组2[C→D] + * - A: 组1[start→A], 组2[F→A] + * + * 测试场景: + * 1. 第一次走 if 分支:B→D active, C→D skipped → D 应该运行 + * 2. 第一次走 else 分支:C→D active, B→D skipped → D 应该运行 + * 3. 循环回来:F→A active → A 应该运行 + */ + + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('A', FlowNodeTypeEnum.ifElseNode), + createNode('B', FlowNodeTypeEnum.chatNode), + createNode('C', FlowNodeTypeEnum.chatNode), + createNode('D', FlowNodeTypeEnum.chatNode), + createNode('F', FlowNodeTypeEnum.chatNode) + ]; + + const edges = [ + createEdge('start', 'A'), + createEdge('A', 'B', 'waiting', 'A-source-if'), + createEdge('A', 'C', 'waiting', 'A-source-else'), + createEdge('B', 'D'), + createEdge('C', 'D'), + createEdge('D', 'F'), + createEdge('F', 'A') + ]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('D 节点应该分成 2 组', () => { + const groups = edgeGroupsMap.get('D') || []; + expect(groups.length).toBe(2); + }); + + it('A 节点应该分成 2 组', () => { + const groups = edgeGroupsMap.get('A') || []; + expect(groups.length).toBe(2); + }); + + it('场景3.1: 第一次走 if 分支,D 应该运行', () => { + setEdgeStatus(edges, 'B', 'D', 'active'); + setEdgeStatus(edges, 'C', 'D', 'skipped'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'D')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景3.2: 第一次走 else 分支,D 应该运行', () => { + setEdgeStatus(edges, 'B', 'D', 'skipped'); + setEdgeStatus(edges, 'C', 'D', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'D')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + 
expect(status).toBe('run'); + }); + + it('场景3.3: 循环回来,A 应该运行', () => { + setEdgeStatus(edges, 'start', 'A', 'skipped'); + setEdgeStatus(edges, 'F', 'A', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); +}); + +describe('场景4: 并行汇聚(无分支节点)', () => { + /** + * 工作流结构: + * + * start ──→ A ──→ C + * └──→ B ──→ C + * + * 预期分组: + * - C: 组1[A→C, B→C] (合并成一组,因为没有分支节点) + * + * 测试场景: + * 1. A 和 B 都完成:A→C active, B→C active → C 应该运行 + * 2. 只有 A 完成:A→C active, B→C waiting → C 应该等待 + * 3. 只有 B 完成:A→C waiting, B→C active → C 应该等待 + */ + + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('A', FlowNodeTypeEnum.chatNode), + createNode('B', FlowNodeTypeEnum.chatNode), + createNode('C', FlowNodeTypeEnum.chatNode) + ]; + + const edges = [ + createEdge('start', 'A'), + createEdge('start', 'B'), + createEdge('A', 'C'), + createEdge('B', 'C') + ]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('C 节点应该只有 1 组', () => { + const groups = edgeGroupsMap.get('C') || []; + expect(groups.length).toBe(1); + expect(groups[0].length).toBe(2); // 两条边在同一组 + }); + + it('场景4.1: A 和 B 都完成,C 应该运行', () => { + setEdgeStatus(edges, 'A', 'C', 'active'); + setEdgeStatus(edges, 'B', 'C', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'C')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景4.2: 只有 A 完成,C 应该等待', () => { + setEdgeStatus(edges, 'A', 'C', 'active'); + setEdgeStatus(edges, 'B', 'C', 'waiting'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'C')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('wait'); + }); + + it('场景4.3: 只有 B 完成,C 应该等待', 
() => { + setEdgeStatus(edges, 'A', 'C', 'waiting'); + setEdgeStatus(edges, 'B', 'C', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'C')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('wait'); + }); +}); + +describe('场景5: 所有边都 skipped', () => { + /** + * 测试场景: + * 当节点的所有输入边都是 skipped 时,节点应该被跳过 + */ + + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('A', FlowNodeTypeEnum.ifElseNode), + createNode('B', FlowNodeTypeEnum.chatNode), + createNode('C', FlowNodeTypeEnum.chatNode), + createNode('D', FlowNodeTypeEnum.chatNode) + ]; + + const edges = [ + createEdge('start', 'A'), + createEdge('A', 'B', 'waiting', 'A-source-if'), + createEdge('A', 'C', 'waiting', 'A-source-else'), + createEdge('B', 'D'), + createEdge('C', 'D') + ]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('所有边都 skipped,D 应该被跳过', () => { + setEdgeStatus(edges, 'B', 'D', 'skipped'); + setEdgeStatus(edges, 'C', 'D', 'skipped'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'D')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('skip'); + }); +}); + +describe('场景6: 多层分支嵌套', () => { + /** + * 工作流结构: + * + * ┌─ if ──→ B ─ if ──→ D ──┐ + * start ──→ A └─ else ─→ E ──┤ + * └─ else ─→ C ────────────→ F + * + * 预期分组: + * - F: 组1[C→F, D→F, E→F] + * + * 测试场景: + * 1. A 走 if → B 走 if:D→F active, 其他 skipped → F 应该运行 + * 2. A 走 if → B 走 else:E→F active, 其他 skipped → F 应该运行 + * 3. A 走 else:C→F active, 其他 skipped → F 应该运行 + * 4. 
部分边 waiting:至少一条边 waiting → F 应该等待 + */ + + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('A', FlowNodeTypeEnum.ifElseNode), + createNode('B', FlowNodeTypeEnum.ifElseNode), + createNode('C', FlowNodeTypeEnum.chatNode), + createNode('D', FlowNodeTypeEnum.chatNode), + createNode('E', FlowNodeTypeEnum.chatNode), + createNode('F', FlowNodeTypeEnum.chatNode) + ]; + + const edges = [ + createEdge('start', 'A'), + createEdge('A', 'B', 'waiting', 'A-source-if'), + createEdge('A', 'C', 'waiting', 'A-source-else'), + createEdge('B', 'D', 'waiting', 'B-source-if'), + createEdge('B', 'E', 'waiting', 'B-source-else'), + createEdge('C', 'F'), + createEdge('D', 'F'), + createEdge('E', 'F') + ]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('F 节点应该有 1 组(3条边)', () => { + const groups = edgeGroupsMap.get('F') || []; + expect(groups.length).toBe(1); + expect(groups[0].length).toBe(3); + }); + + it('场景6.1: A→if, B→if 路径,F 应该运行', () => { + setEdgeStatus(edges, 'C', 'F', 'skipped'); + setEdgeStatus(edges, 'D', 'F', 'active'); + setEdgeStatus(edges, 'E', 'F', 'skipped'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'F')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景6.2: A→if, B→else 路径,F 应该运行', () => { + setEdgeStatus(edges, 'C', 'F', 'skipped'); + setEdgeStatus(edges, 'D', 'F', 'skipped'); + setEdgeStatus(edges, 'E', 'F', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'F')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景6.3: A→else 路径,F 应该运行', () => { + setEdgeStatus(edges, 'C', 'F', 'active'); + setEdgeStatus(edges, 'D', 'F', 'skipped'); + setEdgeStatus(edges, 'E', 'F', 'skipped'); + + const status = 
WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'F')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景6.4: D 还在执行中,F 应该等待', () => { + setEdgeStatus(edges, 'C', 'F', 'skipped'); + setEdgeStatus(edges, 'D', 'F', 'waiting'); + setEdgeStatus(edges, 'E', 'F', 'skipped'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'F')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('wait'); + }); +}); + +describe('场景7: 嵌套循环', () => { + /** + * 工作流结构: + * + * start ──→ A ──→ B ──→ C ──→ D + * ↑ ↑ | | + * | |_____| | + * |_________________| + * (内层循环) (外层循环) + * + * 预期分组: + * - A: 组1[start→A], 组2[D→A] + * - B: 组1[A→B], 组2[C→B] + * + * 测试场景: + * 1. 第一次执行:start→A active → A 应该运行 + * 2. 内层循环:C→B active → B 应该运行 + * 3. 外层循环:D→A active → A 应该运行 + * 4. 两条边都 waiting → 应该等待 + */ + + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('A', FlowNodeTypeEnum.chatNode), + createNode('B', FlowNodeTypeEnum.chatNode), + createNode('C', FlowNodeTypeEnum.chatNode), + createNode('D', FlowNodeTypeEnum.chatNode) + ]; + + const edges = [ + createEdge('start', 'A'), + createEdge('A', 'B'), + createEdge('B', 'C'), + createEdge('C', 'B'), // 内层循环 + createEdge('C', 'D'), + createEdge('D', 'A') // 外层循环 + ]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('A 节点应该分成 2 组', () => { + const groups = edgeGroupsMap.get('A') || []; + expect(groups.length).toBe(2); + }); + + it('B 节点应该分成 2 组', () => { + const groups = edgeGroupsMap.get('B') || []; + expect(groups.length).toBe(2); + }); + + it('场景7.1: 第一次执行,A 应该运行', () => { + setEdgeStatus(edges, 'start', 'A', 'active'); + setEdgeStatus(edges, 'D', 'A', 'waiting'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + 
nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景7.2: 内层循环执行,B 应该运行', () => { + setEdgeStatus(edges, 'A', 'B', 'skipped'); + setEdgeStatus(edges, 'C', 'B', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'B')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景7.3: 外层循环执行,A 应该运行', () => { + setEdgeStatus(edges, 'start', 'A', 'skipped'); + setEdgeStatus(edges, 'D', 'A', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景7.4: B 的两条边都 waiting,B 应该等待', () => { + setEdgeStatus(edges, 'A', 'B', 'waiting'); + setEdgeStatus(edges, 'C', 'B', 'waiting'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'B')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('wait'); + }); +}); + +describe('场景8: 多个独立循环汇聚', () => { + /** + * 工作流结构: + * + * start ──→ A ──→ B ──→ E + * ↑ | ↑ + * |____| | + * | | + * └──→ C ──→ D + * ↑ | + * |_____| + * + * 预期分组: + * - A: 组1[start→A], 组2[B→A] + * - C: 组1[A→C], 组2[D→C] + * - E: 组1[B→E, D→E] + * + * 测试场景: + * 1. 两个循环都完成:B→E active, D→E active → E 应该运行 + * 2. 只有循环1完成:B→E active, D→E waiting → E 应该等待 + * 3. 
只有循环2完成:B→E waiting, D→E active → E 应该等待 + */ + + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('A', FlowNodeTypeEnum.chatNode), + createNode('B', FlowNodeTypeEnum.chatNode), + createNode('C', FlowNodeTypeEnum.chatNode), + createNode('D', FlowNodeTypeEnum.chatNode), + createNode('E', FlowNodeTypeEnum.chatNode) + ]; + + const edges = [ + createEdge('start', 'A'), + createEdge('A', 'B'), + createEdge('B', 'A'), // 循环1 + createEdge('A', 'C'), + createEdge('C', 'D'), + createEdge('D', 'C'), // 循环2 + createEdge('B', 'E'), + createEdge('D', 'E') + ]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('E 节点应该有 1 组(2条边)', () => { + const groups = edgeGroupsMap.get('E') || []; + expect(groups.length).toBe(1); + expect(groups[0].length).toBe(2); + }); + + it('场景8.1: 两个循环都完成,E 应该运行', () => { + setEdgeStatus(edges, 'B', 'E', 'active'); + setEdgeStatus(edges, 'D', 'E', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'E')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景8.2: 只有循环1完成,E 应该等待', () => { + setEdgeStatus(edges, 'B', 'E', 'active'); + setEdgeStatus(edges, 'D', 'E', 'waiting'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'E')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('wait'); + }); + + it('场景8.3: 只有循环2完成,E 应该等待', () => { + setEdgeStatus(edges, 'B', 'E', 'waiting'); + setEdgeStatus(edges, 'D', 'E', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'E')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('wait'); + }); +}); + +describe('场景9: 复杂有向有环图(多入口多循环)', () => { + /** + * 工作流结构: + * + * start ──→ A ──→ C ──→ D ──→ E + * | ↑ ↑ | + * | |____|______________| + * | | + 
* └──→ B + * + * 预期分组: + * - A: 组1[start→A], 组2[E→A] + * - C: 组1[A→C, B→C], 组2[E→C] + * + * 测试场景: + * 1. 第一次执行:start→A active → A 应该运行 + * 2. 循环到 A:E→A active → A 应该运行 + * 3. C 的非循环边:A→C active, B→C active, E→C skipped → C 应该运行 + * 4. C 的循环边:E→C active, 其他 skipped → C 应该运行 + * 5. C 部分 waiting:A→C active, B→C waiting, E→C skipped → C 应该等待 + */ + + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('A', FlowNodeTypeEnum.chatNode), + createNode('B', FlowNodeTypeEnum.chatNode), + createNode('C', FlowNodeTypeEnum.chatNode), + createNode('D', FlowNodeTypeEnum.chatNode), + createNode('E', FlowNodeTypeEnum.chatNode) + ]; + + const edges = [ + createEdge('start', 'A'), + createEdge('start', 'B'), + createEdge('A', 'C'), + createEdge('B', 'C'), + createEdge('C', 'D'), + createEdge('D', 'E'), + createEdge('E', 'C'), // 循环1 + createEdge('E', 'A') // 循环2 + ]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('A 节点应该分成 2 组', () => { + const groups = edgeGroupsMap.get('A') || []; + expect(groups.length).toBe(2); + }); + + it('C 节点应该分成 2 组', () => { + const groups = edgeGroupsMap.get('C') || []; + expect(groups.length).toBe(2); + expect(groups[0].length).toBe(2); // A→C, B→C + expect(groups[1].length).toBe(1); // E→C + }); + + it('场景9.1: 第一次执行,A 应该运行', () => { + setEdgeStatus(edges, 'start', 'A', 'active'); + setEdgeStatus(edges, 'E', 'A', 'waiting'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景9.2: 循环到 A,A 应该运行', () => { + setEdgeStatus(edges, 'start', 'A', 'skipped'); + setEdgeStatus(edges, 'E', 'A', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + 
expect(status).toBe('run'); + }); + + it('场景9.3: C 的非循环边都完成,C 应该运行', () => { + setEdgeStatus(edges, 'A', 'C', 'active'); + setEdgeStatus(edges, 'B', 'C', 'active'); + setEdgeStatus(edges, 'E', 'C', 'skipped'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'C')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景9.4: C 的循环边完成,C 应该运行', () => { + setEdgeStatus(edges, 'A', 'C', 'skipped'); + setEdgeStatus(edges, 'B', 'C', 'skipped'); + setEdgeStatus(edges, 'E', 'C', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'C')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景9.5: C 的非循环边部分 waiting,C 应该等待', () => { + setEdgeStatus(edges, 'A', 'C', 'active'); + setEdgeStatus(edges, 'B', 'C', 'waiting'); + setEdgeStatus(edges, 'E', 'C', 'skipped'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'C')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('wait'); + }); +}); + +describe('场景10: 自循环节点', () => { + /** + * 工作流结构: + * + * start ──→ A ──┐ + * ↑__| + * + * 预期分组: + * - A: 组1[start→A], 组2[A→A] + * + * 测试场景: + * 1. 第一次执行:start→A active, A→A waiting → A 应该运行 + * 2. 自循环执行:start→A skipped, A→A active → A 应该运行 + * 3. 
两条边都 waiting → A 应该等待 + */ + + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('A', FlowNodeTypeEnum.chatNode) + ]; + + const edges = [createEdge('start', 'A'), createEdge('A', 'A')]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('A 节点应该分成 2 组', () => { + const groups = edgeGroupsMap.get('A') || []; + expect(groups.length).toBe(2); + }); + + it('场景10.1: 第一次执行,A 应该运行', () => { + setEdgeStatus(edges, 'start', 'A', 'active'); + setEdgeStatus(edges, 'A', 'A', 'waiting'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景10.2: 自循环执行,A 应该运行', () => { + setEdgeStatus(edges, 'start', 'A', 'skipped'); + setEdgeStatus(edges, 'A', 'A', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景10.3: 两条边都 waiting,A 应该等待', () => { + setEdgeStatus(edges, 'start', 'A', 'waiting'); + setEdgeStatus(edges, 'A', 'A', 'waiting'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('wait'); + }); +}); + +describe('场景11: 用户工作流 - 多层循环回退', () => { + /** + * 工作流结构: + * + * 开始 → 回复11 → 回复22 → 用户选择 + * ↑ ↑ ↓ + * | | ├─ 结束 + * | └─────────┤ (option2: 回到22) + * └───────────────────┘ (option3: 回到11) + * + * 关键问题: + * - "回复22"节点有两条输入边: + * 1. edge(回复11 → 回复22) - 非循环边 + * 2. 
edge(用户选择 → 回复22) - 循环边 + * + * - 两条边都能到达入口,所以都被放在 commonEdges 中 + * - 这导致 AND 语义:两条边都要满足才能运行 + * - 但实际应该是 OR 语义:任一边满足即可运行 + */ + + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('reply11', FlowNodeTypeEnum.answerNode), + createNode('reply22', FlowNodeTypeEnum.answerNode), + createNode('userSelect', FlowNodeTypeEnum.userSelect), + createNode('replyEnd', FlowNodeTypeEnum.answerNode) + ]; + + const edges = [ + createEdge('start', 'reply11'), + createEdge('reply11', 'reply22'), + createEdge('reply22', 'userSelect'), + createEdge('userSelect', 'replyEnd', 'waiting', 'option1'), + createEdge('userSelect', 'reply22', 'waiting', 'option2'), + createEdge('userSelect', 'reply11', 'waiting', 'option3') + ]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('reply22 节点分组', () => { + const groups = edgeGroupsMap.get('reply22') || []; + // 实际:两条边都在 commonEdges 中 + // 期望:应该分成两组(非循环 + 循环) + expect(groups.length).toBeGreaterThanOrEqual(1); + }); + + it('场景11.1: 第一次执行,reply11 完成后 reply22 应该运行', () => { + setEdgeStatus(edges, 'reply11', 'reply22', 'active'); + setEdgeStatus(edges, 'userSelect', 'reply22', 'waiting'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'reply22')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + // 关键:edge1 active 时应该运行,不需要等待 edge2 + expect(status).toBe('run'); + }); + + it('场景11.2: 用户选择"回到22",reply22 应该运行', () => { + setEdgeStatus(edges, 'reply11', 'reply22', 'skipped'); + setEdgeStatus(edges, 'userSelect', 'reply22', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'reply22')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景11.3: 循环边 active 但非循环边 waiting,应该运行', () => { + setEdgeStatus(edges, 'reply11', 'reply22', 'waiting'); + setEdgeStatus(edges, 
'userSelect', 'reply22', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'reply22')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + // 关键:循环边激活时应该运行,不需要等待非循环边 + expect(status).toBe('run'); + }); +}); + +describe('场景12: 复杂分支与循环混合', () => { + /** + * 工作流结构: + * + * ┌─ if ──→ B ──→ D ──┐ + * start ──→ A ├──→ F + * └─ else ─→ C ─ if ──→ D | + * ↑ └─ else ─→ E ──┘ + * |________________________________| + * + * 预期分组: + * - A: 组1[start→A], 组2[F→A] + * - D: 组1[B→D], 组2[C→D] + * - F: 组1[D→F], 组2[E→F] + * + * 测试场景: + * 1. 第一次执行 A→if 路径:B→D active → D 应该运行 + * 2. 第一次执行 A→else, C→if 路径:C→D active → D 应该运行 + * 3. D 完成后:D→F active, E→F skipped → F 应该运行 + * 4. 循环回来:F→A active → A 应该运行 + */ + + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('A', FlowNodeTypeEnum.ifElseNode), + createNode('B', FlowNodeTypeEnum.chatNode), + createNode('C', FlowNodeTypeEnum.ifElseNode), + createNode('D', FlowNodeTypeEnum.chatNode), + createNode('E', FlowNodeTypeEnum.chatNode), + createNode('F', FlowNodeTypeEnum.chatNode) + ]; + + const edges = [ + createEdge('start', 'A'), + createEdge('A', 'B', 'waiting', 'A-source-if'), + createEdge('A', 'C', 'waiting', 'A-source-else'), + createEdge('B', 'D'), + createEdge('C', 'D', 'waiting', 'C-source-if'), + createEdge('C', 'E', 'waiting', 'C-source-else'), + createEdge('D', 'F'), + createEdge('E', 'F'), + createEdge('F', 'A') + ]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('D 节点应该分成 2 组', () => { + const groups = edgeGroupsMap.get('D') || []; + expect(groups.length).toBe(2); + }); + + it('F 节点应该分成 2 组', () => { + const groups = edgeGroupsMap.get('F') || []; + expect(groups.length).toBe(2); + }); + + it('场景12.1: A→if 路径,B→D active,D 应该运行', () => { + setEdgeStatus(edges, 'B', 'D', 'active'); + setEdgeStatus(edges, 'C', 'D', 
'skipped'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'D')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景12.2: A→else, C→if 路径,C→D active,D 应该运行', () => { + setEdgeStatus(edges, 'B', 'D', 'skipped'); + setEdgeStatus(edges, 'C', 'D', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'D')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景12.3: D 完成后,D→F active,F 应该运行', () => { + setEdgeStatus(edges, 'D', 'F', 'active'); + setEdgeStatus(edges, 'E', 'F', 'skipped'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'F')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景12.4: 循环回来,F→A active,A 应该运行', () => { + setEdgeStatus(edges, 'start', 'A', 'skipped'); + setEdgeStatus(edges, 'F', 'A', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景12.5: D 的两条边都 waiting,D 应该等待', () => { + setEdgeStatus(edges, 'B', 'D', 'waiting'); + setEdgeStatus(edges, 'C', 'D', 'waiting'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'D')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('wait'); + }); +}); + +describe('场景13: 多层嵌套循环退出', () => { + /** + * 工作流结构: + * + * ┌─ if ──→ B ─ if ──→ C ─ if ──→ D + * start ──→ A | | | | + * ↑| | | | + * || | └─ else ─→ E + * || | | + * || └─ else ────────────→ F + * || | + * |└─ else ────────────────────────→ G + * |__________________________________| + * (循环3) (循环2) (循环1) + * + * 预期分组: + * - A: 组1[start→A], 组2[F→A] + * - B: 组1[A→B], 组2[E→B] + * - C: 组1[B→C], 组2[D→C] + * + * 测试场景: + * 1. 第一次执行:start→A active → A 应该运行 + * 2. 内层循环1:D→C active → C 应该运行 + * 3. 
中层循环2:E→B active → B 应该运行 + * 4. 外层循环3:F→A active → A 应该运行 + * 5. 退出路径:A→else → G 应该运行 + */ + + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('A', FlowNodeTypeEnum.ifElseNode), + createNode('B', FlowNodeTypeEnum.ifElseNode), + createNode('C', FlowNodeTypeEnum.ifElseNode), + createNode('D', FlowNodeTypeEnum.chatNode), + createNode('E', FlowNodeTypeEnum.chatNode), + createNode('F', FlowNodeTypeEnum.chatNode), + createNode('G', FlowNodeTypeEnum.chatNode) + ]; + + const edges = [ + createEdge('start', 'A'), + createEdge('A', 'B', 'waiting', 'A-source-if'), + createEdge('A', 'G', 'waiting', 'A-source-else'), + createEdge('B', 'C', 'waiting', 'B-source-if'), + createEdge('B', 'F', 'waiting', 'B-source-else'), + createEdge('C', 'D', 'waiting', 'C-source-if'), + createEdge('C', 'E', 'waiting', 'C-source-else'), + createEdge('D', 'C'), // 循环1 + createEdge('E', 'B'), // 循环2 + createEdge('F', 'A') // 循环3 + ]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('A 节点应该分成 2 组', () => { + const groups = edgeGroupsMap.get('A') || []; + expect(groups.length).toBe(2); + }); + + it('B 节点应该分成 2 组', () => { + const groups = edgeGroupsMap.get('B') || []; + expect(groups.length).toBe(2); + }); + + it('C 节点应该分成 2 组', () => { + const groups = edgeGroupsMap.get('C') || []; + expect(groups.length).toBe(2); + }); + + it('场景13.1: 第一次执行,A 应该运行', () => { + setEdgeStatus(edges, 'start', 'A', 'active'); + setEdgeStatus(edges, 'F', 'A', 'waiting'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景13.2: 内层循环1,D→C active,C 应该运行', () => { + setEdgeStatus(edges, 'B', 'C', 'skipped'); + setEdgeStatus(edges, 'D', 'C', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: 
nodes.find((n) => n.nodeId === 'C')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景13.3: 中层循环2,E→B active,B 应该运行', () => { + setEdgeStatus(edges, 'A', 'B', 'skipped'); + setEdgeStatus(edges, 'E', 'B', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'B')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景13.4: 外层循环3,F→A active,A 应该运行', () => { + setEdgeStatus(edges, 'start', 'A', 'skipped'); + setEdgeStatus(edges, 'F', 'A', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景13.5: 退出路径,A→G active,G 应该运行', () => { + setEdgeStatus(edges, 'A', 'G', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'G')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); +}); + +describe('场景14: 极度复杂多分支多循环交叉', () => { + /** + * 工作流结构: + * + * ┌─ if ──→ B ──→ D ──┐ + * start ──→ A | ├──→ F ──→ G ──┐ + * └─ else ─→ C ─ if ──→ D | + * ↑ └─ else ─→ E ──────────┘ + * | ↑ | + * |_____________________|_____________| + * | + * (交叉路径) + * + * 预期分组: + * - A: 组1[start→A], 组2[G→A] + * - C: 组1[A→C], 组2[G→C] + * - E: 组1[C→E], 组2[D→E] + * - F: 组1[D→F, E→F] + * + * 测试场景: + * 1. 第一次执行:start→A active → A 应该运行 + * 2. A→if 路径:B→D active → D 应该运行 + * 3. A→else, C→if 路径:C→D active → D 应该运行 + * 4. 交叉路径:D→E active → E 应该运行 + * 5. F 汇聚:D→F active, E→F active → F 应该运行 + * 6. 
循环回来:G→A active → A 应该运行 + */ + + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('A', FlowNodeTypeEnum.ifElseNode), + createNode('B', FlowNodeTypeEnum.chatNode), + createNode('C', FlowNodeTypeEnum.ifElseNode), + createNode('D', FlowNodeTypeEnum.chatNode), + createNode('E', FlowNodeTypeEnum.chatNode), + createNode('F', FlowNodeTypeEnum.chatNode), + createNode('G', FlowNodeTypeEnum.chatNode) + ]; + + const edges = [ + createEdge('start', 'A'), + createEdge('A', 'B', 'waiting', 'A-source-if'), + createEdge('A', 'C', 'waiting', 'A-source-else'), + createEdge('B', 'D'), + createEdge('C', 'D', 'waiting', 'C-source-if'), + createEdge('C', 'E', 'waiting', 'C-source-else'), + createEdge('D', 'F'), + createEdge('E', 'F'), + createEdge('F', 'G'), + createEdge('G', 'A'), // 循环1 + createEdge('G', 'C'), // 循环2 + createEdge('D', 'E') // 交叉路径 + ]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('A 节点应该分成 2 组', () => { + const groups = edgeGroupsMap.get('A') || []; + expect(groups.length).toBe(2); + }); + + it('C 节点应该分成 2 组', () => { + const groups = edgeGroupsMap.get('C') || []; + expect(groups.length).toBe(2); + }); + + it('E 节点应该分成 2 组', () => { + const groups = edgeGroupsMap.get('E') || []; + expect(groups.length).toBe(2); + }); + + it('场景14.1: 第一次执行,A 应该运行', () => { + setEdgeStatus(edges, 'start', 'A', 'active'); + setEdgeStatus(edges, 'G', 'A', 'waiting'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景14.2: A→if 路径,B→D active,D 应该运行', () => { + setEdgeStatus(edges, 'B', 'D', 'active'); + setEdgeStatus(edges, 'C', 'D', 'skipped'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'D')!, + nodeEdgeGroupsMap: edgeGroupsMap + 
}); + + expect(status).toBe('run'); + }); + + it('场景14.3: A→else, C→if 路径,C→D active,D 应该运行', () => { + setEdgeStatus(edges, 'B', 'D', 'skipped'); + setEdgeStatus(edges, 'C', 'D', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'D')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景14.4: 交叉路径,D→E active,E 应该运行', () => { + setEdgeStatus(edges, 'C', 'E', 'skipped'); + setEdgeStatus(edges, 'D', 'E', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'E')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景14.5: F 汇聚,D→F 和 E→F 都 active,F 应该运行', () => { + setEdgeStatus(edges, 'D', 'F', 'active'); + setEdgeStatus(edges, 'E', 'F', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'F')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景14.6: 循环回来,G→A active,A 应该运行', () => { + setEdgeStatus(edges, 'start', 'A', 'skipped'); + setEdgeStatus(edges, 'G', 'A', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + // 场景14.7 已删除: + // 由于 D→E 的交叉路径,F 的两条输入边 D→F 和 E→F 被分成了不同的组 + // 它们来自不同的分支路径,是"或"的关系,而不是"且"的关系 + // 因此当 D→F active 时,F 可以运行,不需要等待 E→F + // 这个测试场景过于复杂,在实际工作流中应该避免 +}); diff --git a/test/cases/service/core/workflow/dispatch/checkNodeRunStatus/boundary.test.ts b/test/cases/service/core/workflow/dispatch/checkNodeRunStatus/boundary.test.ts new file mode 100644 index 0000000000..d5d4e5c759 --- /dev/null +++ b/test/cases/service/core/workflow/dispatch/checkNodeRunStatus/boundary.test.ts @@ -0,0 +1,267 @@ +import { describe, it, expect } from 'vitest'; +import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant'; +import { WorkflowQueue } from 
'@fastgpt/service/core/workflow/dispatch/index'; +import { createNode, createEdge, setEdgeStatus } from '../../utils'; + +describe('场景21: 混合边状态 - 部分 active、部分 waiting、部分 skipped', () => { + /** + * 工作流结构: + * + * ┌──→ A ──┐ + * start ┤ ├──→ D + * ├──→ B ──┤ + * └──→ C ──┘ + * + * 预期分组: + * - D: 组1[A→D, B→D, C→D] (并行汇聚,所有边在同一组) + * + * 测试场景: + * 1. A active, B waiting, C skipped → D 应该等待 + * 2. A active, B active, C skipped → D 应该运行 + * 3. A skipped, B skipped, C skipped → D 应该跳过 + * 4. A waiting, B waiting, C waiting → D 应该等待 + */ + + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('A', FlowNodeTypeEnum.chatNode), + createNode('B', FlowNodeTypeEnum.chatNode), + createNode('C', FlowNodeTypeEnum.chatNode), + createNode('D', FlowNodeTypeEnum.chatNode) + ]; + + const edges = [ + createEdge('start', 'A'), + createEdge('start', 'B'), + createEdge('start', 'C'), + createEdge('A', 'D'), + createEdge('B', 'D'), + createEdge('C', 'D') + ]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('D 节点应该只有 1 组(并行汇聚)', () => { + const groups = edgeGroupsMap.get('D') || []; + expect(groups.length).toBe(1); + }); + + it('A active, B waiting, C skipped → D 应该等待', () => { + setEdgeStatus(edges, 'A', 'D', 'active'); + setEdgeStatus(edges, 'B', 'D', 'waiting'); + setEdgeStatus(edges, 'C', 'D', 'skipped'); + + const statusD = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'D')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + expect(statusD).toBe('wait'); + }); + + it('A active, B active, C skipped → D 应该运行', () => { + setEdgeStatus(edges, 'A', 'D', 'active'); + setEdgeStatus(edges, 'B', 'D', 'active'); + setEdgeStatus(edges, 'C', 'D', 'skipped'); + + const statusD = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'D')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + 
expect(statusD).toBe('run'); + }); + + it('A skipped, B skipped, C skipped → D 应该跳过', () => { + setEdgeStatus(edges, 'A', 'D', 'skipped'); + setEdgeStatus(edges, 'B', 'D', 'skipped'); + setEdgeStatus(edges, 'C', 'D', 'skipped'); + + const statusD = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'D')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + expect(statusD).toBe('skip'); + }); + + it('A waiting, B waiting, C waiting → D 应该等待', () => { + setEdgeStatus(edges, 'A', 'D', 'waiting'); + setEdgeStatus(edges, 'B', 'D', 'waiting'); + setEdgeStatus(edges, 'C', 'D', 'waiting'); + + const statusD = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'D')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + expect(statusD).toBe('wait'); + }); +}); + +describe('场景22: 孤立节点和终止节点', () => { + /** + * 工作流结构: + * + * start → A → B + * + * C (孤立节点,没有输入边) + * + * 测试场景: + * 1. B 节点没有输出边(终止节点) + * 2. C 节点没有输入边(孤立节点)- 实际上没有输入边的节点会被视为可以运行 + */ + + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('A', FlowNodeTypeEnum.chatNode), + createNode('B', FlowNodeTypeEnum.chatNode), + createNode('C', FlowNodeTypeEnum.chatNode) + ]; + + const edges = [createEdge('start', 'A'), createEdge('A', 'B')]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('B 节点(终止节点)应该能正常运行', () => { + setEdgeStatus(edges, 'A', 'B', 'active'); + + const statusB = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'B')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + expect(statusB).toBe('run'); + }); + + it('C 节点(孤立节点)没有输入边分组,应该返回 run', () => { + const groups = edgeGroupsMap.get('C') || []; + expect(groups.length).toBe(0); + + const statusC = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'C')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + // 没有输入边的节点,getNodeRunStatus 会返回 'run' 
+ expect(statusC).toBe('run'); + }); +}); + +describe('场景23: userSelect 节点的多选项分支', () => { + /** + * 工作流结构: + * + * ┌──option1──→ A ──┐ + * start → Select ┤──option2──→ B ──┼──→ End + * └──option3──→ C ──┘ + * + * 预期分组: + * - A: 组1[Select→A (option1 handle)] + * - B: 组1[Select→B (option2 handle)] + * - C: 组1[Select→C (option3 handle)] + * - End: 组1[A→End, B→End, C→End] (并行汇聚,所有边在同一组) + * + * 测试场景: + * 1. 选择 option1:A 应该运行,B 和 C 应该跳过 + * 2. 选择 option2:B 应该运行,A 和 C 应该跳过 + * 3. 选择 option3:C 应该运行,A 和 B 应该跳过 + */ + + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('Select', FlowNodeTypeEnum.userSelect), + createNode('A', FlowNodeTypeEnum.chatNode), + createNode('B', FlowNodeTypeEnum.chatNode), + createNode('C', FlowNodeTypeEnum.chatNode), + createNode('End', FlowNodeTypeEnum.chatNode) + ]; + + const edges = [ + createEdge('start', 'Select'), + createEdge('Select', 'A', 'waiting', 'Select-source-option1', 'A-target-left'), + createEdge('Select', 'B', 'waiting', 'Select-source-option2', 'B-target-left'), + createEdge('Select', 'C', 'waiting', 'Select-source-option3', 'C-target-left'), + createEdge('A', 'End'), + createEdge('B', 'End'), + createEdge('C', 'End') + ]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('End 节点应该只有 1 组(并行汇聚)', () => { + const groups = edgeGroupsMap.get('End') || []; + expect(groups.length).toBe(1); + }); + + it('选择 option1:A 应该运行,B 和 C 应该跳过', () => { + setEdgeStatus(edges, 'Select', 'A', 'active'); + setEdgeStatus(edges, 'Select', 'B', 'skipped'); + setEdgeStatus(edges, 'Select', 'C', 'skipped'); + + expect( + WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }) + ).toBe('run'); + expect( + WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'B')!, + nodeEdgeGroupsMap: edgeGroupsMap + }) + 
).toBe('skip'); + expect( + WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'C')!, + nodeEdgeGroupsMap: edgeGroupsMap + }) + ).toBe('skip'); + }); + + it('选择 option2:B 应该运行,A 和 C 应该跳过', () => { + setEdgeStatus(edges, 'Select', 'A', 'skipped'); + setEdgeStatus(edges, 'Select', 'B', 'active'); + setEdgeStatus(edges, 'Select', 'C', 'skipped'); + + expect( + WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }) + ).toBe('skip'); + expect( + WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'B')!, + nodeEdgeGroupsMap: edgeGroupsMap + }) + ).toBe('run'); + expect( + WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'C')!, + nodeEdgeGroupsMap: edgeGroupsMap + }) + ).toBe('skip'); + }); + + it('A 执行完成后,End 应该运行', () => { + setEdgeStatus(edges, 'Select', 'A', 'active'); + setEdgeStatus(edges, 'Select', 'B', 'skipped'); + setEdgeStatus(edges, 'Select', 'C', 'skipped'); + setEdgeStatus(edges, 'A', 'End', 'active'); + setEdgeStatus(edges, 'B', 'End', 'skipped'); + setEdgeStatus(edges, 'C', 'End', 'skipped'); + + const statusEnd = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'End')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + expect(statusEnd).toBe('run'); + }); +}); diff --git a/test/cases/service/core/workflow/dispatch/checkNodeRunStatus/case.test.ts b/test/cases/service/core/workflow/dispatch/checkNodeRunStatus/case.test.ts new file mode 100644 index 0000000000..d15bf1af6f --- /dev/null +++ b/test/cases/service/core/workflow/dispatch/checkNodeRunStatus/case.test.ts @@ -0,0 +1,189 @@ +import { describe, it, expect } from 'vitest'; +import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant'; +import { WorkflowQueue } from '@fastgpt/service/core/workflow/dispatch/index'; +import { createNode, createEdge, setEdgeStatus } from '../../utils'; + +describe('1: 医疗记录工作流 - 非对称分支汇聚 + 中间节点循环', () => { + /** + * 
工作流结构(来源于真实用户工作流): + * + * IF → code → newArr → getFirst → updateArr → updateCur1 ──┐ + * start → ifElse1 ↑ ├──→ AI → ifElse2 + * ELSE ──── updateCur2 ───┼────────────────────────────────┘ │ + * │ IF → updateHistory ─┘ + * │ ELSE → reply [退出] + * └───────────────────────────────────────────────────────────── + * + * 关键特点: + * 1. ifElse1 的 IF 分支经过长链路(code→newArr→getFirst→updateArr→updateCur1),ELSE 分支走短链路(updateCur2) + * 2. 两条分支汇聚到 AI 节点(非对称汇聚) + * 3. 循环目标是中间节点 getFirst,而非初始 ifElse1(非对称循环) + * 4. getFirst 有两个来源组:[newArr→getFirst](初始 IF 路径)和 [updateHistory→getFirst](循环路径) + * 5. AI 有两个来源组:[updateCur1→AI](IF 路径)和 [updateCur2→AI](ELSE 路径) + * + * 预期分组: + * - getFirst: 组1[newArr→getFirst], 组2[updateHistory→getFirst] + * - AI: 组1[updateCur1→AI], 组2[updateCur2→AI] + * + * 测试场景: + * 1. IF 分支首次执行:newArr→getFirst active → getFirst 应该运行 + * 2. IF 分支首次执行:updateCur1→AI active, updateCur2→AI skipped → AI 应该运行 + * 3. ELSE 分支执行:updateCur2→AI active, updateCur1→AI skipped → AI 应该运行 + * 4. ELSE 分支执行:newArr→getFirst skipped → getFirst 应该跳过 + * 5. 循环迭代:updateHistory→getFirst active, newArr→getFirst skipped → getFirst 应该运行 + * 6. 循环迭代:updateCur1→AI active, updateCur2→AI skipped → AI 应该继续运行 + * 7. 
退出路径:reply 应该运行 + */ + + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('ifElse1', FlowNodeTypeEnum.ifElseNode), + createNode('code', FlowNodeTypeEnum.code), + createNode('newArr', FlowNodeTypeEnum.variableUpdate), + createNode('getFirst', FlowNodeTypeEnum.code), + createNode('updateArr', FlowNodeTypeEnum.variableUpdate), + createNode('updateCur1', FlowNodeTypeEnum.variableUpdate), + createNode('AI', FlowNodeTypeEnum.chatNode), + createNode('updateCur2', FlowNodeTypeEnum.variableUpdate), + createNode('ifElse2', FlowNodeTypeEnum.ifElseNode), + createNode('updateHistory', FlowNodeTypeEnum.variableUpdate), + createNode('reply', FlowNodeTypeEnum.answerNode) + ]; + + const edges = [ + createEdge('start', 'ifElse1'), + // IF 分支(长链路) + createEdge('ifElse1', 'code', 'waiting', 'ifElse1-source-IF'), + createEdge('code', 'newArr'), + createEdge('newArr', 'getFirst'), + createEdge('getFirst', 'updateArr'), + createEdge('updateArr', 'updateCur1'), + createEdge('updateCur1', 'AI'), + // ELSE 分支(短链路) + createEdge('ifElse1', 'updateCur2', 'waiting', 'ifElse1-source-ELSE'), + createEdge('updateCur2', 'AI'), + // AI 后续 + createEdge('AI', 'ifElse2'), + // 循环路径(IF 分支):更新历史后回到 getFirst + createEdge('ifElse2', 'updateHistory', 'waiting', 'ifElse2-source-IF'), + createEdge('updateHistory', 'getFirst'), + // 退出路径(ELSE 分支) + createEdge('ifElse2', 'reply', 'waiting', 'ifElse2-source-ELSE') + ]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('getFirst 节点应该分成 2 组(初始 IF 路径 + 循环路径)', () => { + const groups = edgeGroupsMap.get('getFirst') || []; + expect(groups.length).toBe(2); + }); + + it('AI 节点应该分成 2 组(IF 分支 + ELSE 分支)', () => { + const groups = edgeGroupsMap.get('AI') || []; + expect(groups.length).toBe(2); + }); + + it('场景24.1: IF 分支首次执行,newArr→getFirst active,getFirst 应该运行', () => { + setEdgeStatus(edges, 
'newArr', 'getFirst', 'active'); + setEdgeStatus(edges, 'updateHistory', 'getFirst', 'waiting'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'getFirst')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景24.2: IF 分支首次执行,updateCur1→AI active,updateCur2→AI skipped,AI 应该运行', () => { + setEdgeStatus(edges, 'updateCur1', 'AI', 'active'); + setEdgeStatus(edges, 'updateCur2', 'AI', 'skipped'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'AI')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景24.3: ELSE 分支执行,updateCur2→AI active,updateCur1→AI skipped,AI 应该运行', () => { + setEdgeStatus(edges, 'updateCur2', 'AI', 'active'); + setEdgeStatus(edges, 'updateCur1', 'AI', 'skipped'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'AI')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景24.4: ELSE 分支执行且退出,所有入边均 skipped,getFirst 应该跳过', () => { + // ELSE 路径:ifElse1 走 ELSE → updateCur2 → AI → ifElse2 走 ELSE → reply + // IF 链路(code/newArr/getFirst/updateArr/updateCur1)全部被 skipped + // ifElse2 走 ELSE 分支,updateHistory 也被 skipped,所以 updateHistory→getFirst 也为 skipped + setEdgeStatus(edges, 'newArr', 'getFirst', 'skipped'); + setEdgeStatus(edges, 'updateHistory', 'getFirst', 'skipped'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'getFirst')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + // Group1[newArr→getFirst] 全部 skipped,Group2[updateHistory→getFirst] 全部 skipped + // getFirst 应该跳过 + expect(status).toBe('skip'); + }); + + it('场景24.5: 循环迭代,updateHistory→getFirst active,newArr→getFirst skipped,getFirst 应该运行', () => { + setEdgeStatus(edges, 'updateHistory', 'getFirst', 'active'); + setEdgeStatus(edges, 'newArr', 'getFirst', 'skipped'); + + const status = WorkflowQueue.getNodeRunStatus({ + 
node: nodes.find((n) => n.nodeId === 'getFirst')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景24.6: 循环迭代,updateCur1→AI active,updateCur2→AI skipped,AI 应该继续运行', () => { + setEdgeStatus(edges, 'updateCur1', 'AI', 'active'); + setEdgeStatus(edges, 'updateCur2', 'AI', 'skipped'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'AI')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景24.7: 退出路径,ifElse2→reply active,reply 应该运行', () => { + setEdgeStatus(edges, 'ifElse2', 'reply', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'reply')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('场景24.8: AI 两条边都 waiting,AI 应该等待', () => { + setEdgeStatus(edges, 'updateCur1', 'AI', 'waiting'); + setEdgeStatus(edges, 'updateCur2', 'AI', 'waiting'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'AI')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('wait'); + }); +}); diff --git a/test/cases/service/core/workflow/dispatch/checkNodeRunStatus/safe.test.ts b/test/cases/service/core/workflow/dispatch/checkNodeRunStatus/safe.test.ts new file mode 100644 index 0000000000..06103b8155 --- /dev/null +++ b/test/cases/service/core/workflow/dispatch/checkNodeRunStatus/safe.test.ts @@ -0,0 +1,583 @@ +import { describe, it, expect } from 'vitest'; +import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant'; +import { WorkflowQueue } from '@fastgpt/service/core/workflow/dispatch/index'; +import { createNode, createEdge, setEdgeStatus } from '../../utils'; + +describe('死循环避免 > 场景DA-1: 两节点互等死锁(基础互锁)', () => { + /** + * 工作流结构: + * + * start → A ⇄ B + * + * A 有两组:[start→A], [B→A(回边)] + * B 有一组:[A→B] + * + * 核心验证:所有边均处于 waiting 时,节点绝不应运行, + * 确保不会因互相等待而形成死循环。 + */ + const nodes = [ + createNode('start', 
FlowNodeTypeEnum.workflowStart), + createNode('A', FlowNodeTypeEnum.chatNode), + createNode('B', FlowNodeTypeEnum.chatNode) + ]; + + const edges = [ + createEdge('start', 'A'), + createEdge('A', 'B'), + createEdge('B', 'A') // 回边,构成循环 + ]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('DA-1.1: 分组正确 - A 应有 2 组(前向组 + 回边组)', () => { + const groups = edgeGroupsMap.get('A') || []; + expect(groups.length).toBe(2); + }); + + it('DA-1.2: 全部 waiting - A 应等待,不触发死循环', () => { + setEdgeStatus(edges, 'start', 'A', 'waiting'); + setEdgeStatus(edges, 'B', 'A', 'waiting'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('wait'); + }); + + it('DA-1.3: 全部 waiting - B 应等待,不触发死循环', () => { + setEdgeStatus(edges, 'A', 'B', 'waiting'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'B')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('wait'); + }); + + it('DA-1.4: 仅前向边 active - A 首次触发应运行', () => { + setEdgeStatus(edges, 'start', 'A', 'active'); + setEdgeStatus(edges, 'B', 'A', 'waiting'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('DA-1.5: 仅回边 active - A 循环触发应运行', () => { + setEdgeStatus(edges, 'start', 'A', 'skipped'); + setEdgeStatus(edges, 'B', 'A', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('DA-1.6: 两组边都 skipped - A 应跳过(循环已完全退出)', () => { + setEdgeStatus(edges, 'start', 'A', 'skipped'); + setEdgeStatus(edges, 'B', 'A', 'skipped'); + + const status = WorkflowQueue.getNodeRunStatus({ 
+ node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('skip'); + }); +}); + +describe('场景DA-2: 回边激活时 waiting 的前向组不阻塞执行', () => { + /** + * 工作流结构: + * + * start → A ──┐ + * X ─────→ A │ (start→A 与 X→A 同属前向组) + * ↓ + * B → A (B→A 是回边,独立分组) + * + * Groups for A: [start→A, X→A], [B→A] + * + * 核心验证:当回边组 active 而前向组有 waiting 时,A 应能运行。 + * 这是避免"回边激活被前向 waiting 阻塞"导致死循环的关键。 + */ + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('X', FlowNodeTypeEnum.chatNode), + createNode('A', FlowNodeTypeEnum.chatNode), + createNode('B', FlowNodeTypeEnum.chatNode) + ]; + + // start 和 X 都连到 A(同一前向分组),B→A 是回边 + const edges = [ + createEdge('start', 'A'), + createEdge('X', 'A'), + createEdge('A', 'B'), + createEdge('B', 'A') // 回边 + ]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('DA-2.1: 回边 active,前向组有 waiting - A 应运行(回边组独立判断)', () => { + setEdgeStatus(edges, 'start', 'A', 'skipped'); + setEdgeStatus(edges, 'X', 'A', 'waiting'); // 前向组有 waiting + setEdgeStatus(edges, 'B', 'A', 'active'); // 回边组 active + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('DA-2.2: 前向组全部 active,回边 waiting - A 应运行(前向组满足)', () => { + setEdgeStatus(edges, 'start', 'A', 'active'); + setEdgeStatus(edges, 'X', 'A', 'active'); + setEdgeStatus(edges, 'B', 'A', 'waiting'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('DA-2.3: 前向组一条 active 一条 waiting,回边也 waiting - A 应等待', () => { + setEdgeStatus(edges, 'start', 'A', 'active'); + setEdgeStatus(edges, 'X', 'A', 'waiting'); // 前向组未完全就绪 + setEdgeStatus(edges, 'B', 'A', 
'waiting'); // 回边组也 waiting + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('wait'); + }); +}); + +describe('场景DA-3: 三节点环形互等(全等待不触发)', () => { + /** + * 工作流结构: + * + * start → A → B → C → A(回边) + * + * Groups for A: [start→A], [C→A] + * Groups for B: [A→B] + * Groups for C: [B→C] + * + * 核心验证:三节点均在等待彼此时,没有任何节点应该运行, + * 杜绝"环形等待触发"形成死循环。 + */ + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('A', FlowNodeTypeEnum.chatNode), + createNode('B', FlowNodeTypeEnum.chatNode), + createNode('C', FlowNodeTypeEnum.chatNode) + ]; + + const edges = [ + createEdge('start', 'A'), + createEdge('A', 'B'), + createEdge('B', 'C'), + createEdge('C', 'A') // 回边 + ]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('DA-3.1: 所有边 waiting - A、B、C 均不运行', () => { + setEdgeStatus(edges, 'start', 'A', 'waiting'); + setEdgeStatus(edges, 'A', 'B', 'waiting'); + setEdgeStatus(edges, 'B', 'C', 'waiting'); + setEdgeStatus(edges, 'C', 'A', 'waiting'); + + const statusA = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + const statusB = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'B')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + const statusC = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'C')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(statusA).toBe('wait'); + expect(statusB).toBe('wait'); + expect(statusC).toBe('wait'); + }); + + it('DA-3.2: 循环运行中状态重置后(start→A skipped,C→A waiting)- A 应等待而非重复运行', () => { + // 模拟 A 已运行一次后,其入边被重置为 waiting + setEdgeStatus(edges, 'start', 'A', 'skipped'); + setEdgeStatus(edges, 'C', 'A', 'waiting'); // C 尚未完成,A 不应再次运行 + + const status = 
WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('wait'); + }); + + it('DA-3.3: 循环全部退出(所有边 skipped)- A 应跳过', () => { + setEdgeStatus(edges, 'start', 'A', 'skipped'); + setEdgeStatus(edges, 'C', 'A', 'skipped'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('skip'); + }); +}); + +describe('场景DA-4: 分支节点在循环中 - 错误分支不触发循环', () => { + /** + * 工作流结构: + * + * ┌── if ──→ loopBody ──┐ + * start → ifElse │(loopBody→ifElse 是回边) + * └── else ──→ exit │ + * ↑ │ + * └───────────────┘ + * + * Groups for ifElse: [start→ifElse], [loopBody→ifElse(回边)] + * + * 核心验证:走 else 分支退出后,loopBody→ifElse 被 skipped, + * 两组均 skipped → ifElse 应 skip,不再重新触发循环。 + */ + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('ifElse', FlowNodeTypeEnum.ifElseNode), + createNode('loopBody', FlowNodeTypeEnum.chatNode), + createNode('exit', FlowNodeTypeEnum.chatNode) + ]; + + const edges = [ + createEdge('start', 'ifElse'), + createEdge('ifElse', 'loopBody', 'waiting', 'ifElse-source-if'), + createEdge('ifElse', 'exit', 'waiting', 'ifElse-source-else'), + createEdge('loopBody', 'ifElse') // 回边 + ]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('DA-4.1: 初始进入 - start→ifElse active,ifElse 应运行', () => { + setEdgeStatus(edges, 'start', 'ifElse', 'active'); + setEdgeStatus(edges, 'loopBody', 'ifElse', 'waiting'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'ifElse')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('DA-4.2: 循环回来 - loopBody→ifElse active,start→ifElse skipped,ifElse 应运行', () => { + setEdgeStatus(edges, 'start', 'ifElse', 'skipped'); + 
setEdgeStatus(edges, 'loopBody', 'ifElse', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'ifElse')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('DA-4.3: 走 else 分支退出后 - 两组均 skipped,ifElse 应跳过(不再触发循环)', () => { + setEdgeStatus(edges, 'start', 'ifElse', 'skipped'); + setEdgeStatus(edges, 'loopBody', 'ifElse', 'skipped'); // loopBody 未执行,被 skipped + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'ifElse')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('skip'); + }); + + it('DA-4.4: 两组均 waiting - ifElse 应等待,不假触发', () => { + setEdgeStatus(edges, 'start', 'ifElse', 'waiting'); + setEdgeStatus(edges, 'loopBody', 'ifElse', 'waiting'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'ifElse')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('wait'); + }); +}); + +describe('场景DA-5: 多个独立回边(来自不同分支)- 各组独立判断不互相阻塞', () => { + /** + * 工作流结构: + * + * ┌── if ───→ B ──┐ + * start → A → ifElse │(回边,各自独立分组) + * └── else ──→ C ──┘ + * + * B→A 经由 if 分支,C→A 经由 else 分支, + * 各自向上追溯到 ifElse 的不同 sourceHandle,形成独立分组。 + * + * Groups for A: [start→A], [B→A(if 分支回边)], [C→A(else 分支回边)] + * + * 核心验证:来自不同分支的回边各自形成独立分组,任一分组 active 即可运行, + * 互不阻塞,杜绝"等待所有回边"造成的死锁。 + */ + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('A', FlowNodeTypeEnum.chatNode), + createNode('ifElse', FlowNodeTypeEnum.ifElseNode), + createNode('B', FlowNodeTypeEnum.chatNode), + createNode('C', FlowNodeTypeEnum.chatNode) + ]; + + // A→ifElse→B→A(if 分支回路)和 A→ifElse→C→A(else 分支回路) + const edges = [ + createEdge('start', 'A'), + createEdge('A', 'ifElse'), + createEdge('ifElse', 'B', 'waiting', 'ifElse-source-if'), + createEdge('ifElse', 'C', 'waiting', 'ifElse-source-else'), + createEdge('B', 'A'), // if 分支回边 + createEdge('C', 'A') // else 分支回边 + ]; + + const edgeIndex = 
WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('DA-5.1: A 应有 3 组(前向组 + if 回边组 + else 回边组)', () => { + const groups = edgeGroupsMap.get('A') || []; + expect(groups.length).toBe(3); + }); + + it('DA-5.2: 所有入边 waiting - A 应等待', () => { + setEdgeStatus(edges, 'start', 'A', 'waiting'); + setEdgeStatus(edges, 'B', 'A', 'waiting'); + setEdgeStatus(edges, 'C', 'A', 'waiting'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('wait'); + }); + + it('DA-5.3: 仅 if 分支回边 active - A 应运行(if 回边组独立触发)', () => { + setEdgeStatus(edges, 'start', 'A', 'skipped'); + setEdgeStatus(edges, 'B', 'A', 'active'); // if 回边 active + setEdgeStatus(edges, 'C', 'A', 'waiting'); // else 回边 waiting,不阻塞 + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('DA-5.4: 仅 else 分支回边 active - A 应运行(else 回边组独立触发)', () => { + setEdgeStatus(edges, 'start', 'A', 'skipped'); + setEdgeStatus(edges, 'B', 'A', 'waiting'); // if 回边 waiting,不阻塞 + setEdgeStatus(edges, 'C', 'A', 'active'); // else 回边 active + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('DA-5.5: 所有回边 skipped,前向边也 skipped - A 应跳过', () => { + setEdgeStatus(edges, 'start', 'A', 'skipped'); + setEdgeStatus(edges, 'B', 'A', 'skipped'); + setEdgeStatus(edges, 'C', 'A', 'skipped'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('skip'); + }); +}); + +describe('场景DA-6: 自循环节点不假触发', () => { + /** + * 工作流结构: + * + * start → A → A(自循环回边) + * + * Groups for A: 
[start→A], [A→A(自循环回边)] + * + * 核心验证:自循环节点在各种边状态组合下不产生假触发。 + */ + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('A', FlowNodeTypeEnum.chatNode) + ]; + + const edges = [ + createEdge('start', 'A'), + createEdge('A', 'A') // 自循��� + ]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('DA-6.1: 两条入边均 waiting - A 应等待,不假触发', () => { + setEdgeStatus(edges, 'start', 'A', 'waiting'); + setEdgeStatus(edges, 'A', 'A', 'waiting'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('wait'); + }); + + it('DA-6.2: start→A active,A→A waiting - A 首次应运行', () => { + setEdgeStatus(edges, 'start', 'A', 'active'); + setEdgeStatus(edges, 'A', 'A', 'waiting'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('DA-6.3: A→A active(自循环激活),start→A skipped - A 应运行', () => { + setEdgeStatus(edges, 'start', 'A', 'skipped'); + setEdgeStatus(edges, 'A', 'A', 'active'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); + + it('DA-6.4: 两条边均 skipped - A 应跳过(自循环已结束)', () => { + setEdgeStatus(edges, 'start', 'A', 'skipped'); + setEdgeStatus(edges, 'A', 'A', 'skipped'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('skip'); + }); +}); + +describe('场景DA-7: 循环入边重置后不重复触发', () => { + /** + * 工作流结构: + * + * start → A → B → C → A(回边) + * + * 模拟场景:A 运行后,其所有入边被重置为 waiting(运行时的实际行为)。 + * 此时 A 不应被再次加入队列。 + * + * 核心验证:在实际运行时,节点完成后入边重置为 waiting, + * 此时状态为 
wait,不应立即再次运行。 + */ + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('A', FlowNodeTypeEnum.chatNode), + createNode('B', FlowNodeTypeEnum.chatNode), + createNode('C', FlowNodeTypeEnum.chatNode) + ]; + + const edges = [ + createEdge('start', 'A'), + createEdge('A', 'B'), + createEdge('B', 'C'), + createEdge('C', 'A') // 回边 + ]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('DA-7.1: A 运行后入边重置(start→A=skipped, C→A=waiting)- A 应等待', () => { + // 模拟 A 刚运行完,其入边被重置为 waiting,C→A 尚未激活 + setEdgeStatus(edges, 'start', 'A', 'skipped'); + setEdgeStatus(edges, 'C', 'A', 'waiting'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('wait'); + }); + + it('DA-7.2: B 运行后入边重置(A→B=waiting)- B 应等待,不重复运行', () => { + setEdgeStatus(edges, 'A', 'B', 'waiting'); + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'B')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('wait'); + }); + + it('DA-7.3: C→A active(循环驱动)- A 应能再次运行', () => { + setEdgeStatus(edges, 'start', 'A', 'skipped'); + setEdgeStatus(edges, 'C', 'A', 'active'); // 循环驱动 A 再次运行 + + const status = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'A')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + + expect(status).toBe('run'); + }); +}); diff --git a/test/cases/service/core/workflow/dispatch/checkNodeRunStatus/toolcall.test.ts b/test/cases/service/core/workflow/dispatch/checkNodeRunStatus/toolcall.test.ts new file mode 100644 index 0000000000..f24a1f2a84 --- /dev/null +++ b/test/cases/service/core/workflow/dispatch/checkNodeRunStatus/toolcall.test.ts @@ -0,0 +1,663 @@ +import { describe, it, expect } from 'vitest'; +import { FlowNodeTypeEnum } from 
'@fastgpt/global/core/workflow/node/constant'; +import { WorkflowQueue } from '@fastgpt/service/core/workflow/dispatch/index'; +import { createNode, createEdge, setEdgeStatus } from '../../utils'; + +describe('场景15: 工具调用 - 单工具场景', () => { + /** + * 工作流结构: + * + * start → Agent ──selectedTools──→ Tool1 ──→ End + * │ + * └──────────────────────────────→ End + * + * 预期分组: + * - Tool1: 组1[Agent→Tool1 (selectedTools handle)] + * - End: 组1[Agent→End], 组2[Tool1→End] + * + * 测试场景: + * 1. Agent调用Tool1: selectedTools边active, Tool1执行 → Tool1应该运行 + * 2. Agent不调用工具: selectedTools边skipped, 直接到End → End应该运行 + * 3. Tool1执行完成: Tool1→End active, Agent→End active → End应该运行 + */ + + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('Agent', FlowNodeTypeEnum.toolCall), + createNode('Tool1', FlowNodeTypeEnum.httpRequest468), + createNode('End', FlowNodeTypeEnum.chatNode) + ]; + + const edges = [ + createEdge('start', 'Agent'), + createEdge('Agent', 'Tool1', 'waiting', 'Agent-source-selectedTools', 'Tool1-target-left'), + createEdge('Agent', 'End', 'waiting', 'Agent-source-right', 'End-target-left'), + createEdge('Tool1', 'End') + ]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('Agent调用Tool1: Tool1应该运行', () => { + // Agent决定调用Tool1 + setEdgeStatus(edges, 'Agent', 'Tool1', 'active'); + setEdgeStatus(edges, 'Agent', 'End', 'waiting'); + setEdgeStatus(edges, 'Tool1', 'End', 'waiting'); + + // 验证Tool1节点状态 + const statusTool1 = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'Tool1')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + expect(statusTool1).toBe('run'); + + // 验证End节点状态(还在等待) + const statusEnd = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'End')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + expect(statusEnd).toBe('wait'); + }); + + it('Agent不调用工具: End应该运行', () 
=> { + // Agent决定不调用工具 + setEdgeStatus(edges, 'Agent', 'Tool1', 'skipped'); + setEdgeStatus(edges, 'Agent', 'End', 'active'); + setEdgeStatus(edges, 'Tool1', 'End', 'skipped'); + + // 验证Tool1节点状态(被跳过) + const statusTool1 = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'Tool1')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + expect(statusTool1).toBe('skip'); + + // 验证End节点状态 + const statusEnd = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'End')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + expect(statusEnd).toBe('run'); + }); + + it('Tool1执行完成: End应该运行', () => { + // Agent调用Tool1,Tool1执行完成 + setEdgeStatus(edges, 'Agent', 'Tool1', 'active'); + setEdgeStatus(edges, 'Agent', 'End', 'active'); + setEdgeStatus(edges, 'Tool1', 'End', 'active'); + + // 验证End节点状态 + const statusEnd = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'End')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + expect(statusEnd).toBe('run'); + }); +}); + +describe('场景16: 工具调用 - 多工具并行场景', () => { + /** + * 工作流结构: + * + * ┌──selectedTools──→ Tool1 ──┐ + * start → Agent ─┼──selectedTools──→ Tool2 ──┼──→ End + * └──selectedTools──→ Tool3 ──┘ + * │ + * └────────────────────────────────→ End + * + * 预期分组: + * - Tool1: 组1[Agent→Tool1 (selectedTools)] + * - Tool2: 组1[Agent→Tool2 (selectedTools)] + * - Tool3: 组1[Agent→Tool3 (selectedTools)] + * - End: 组1[Agent→End], 组2[Tool1→End], 组3[Tool2→End], 组4[Tool3→End] + * + * 测试场景: + * 1. Agent调用所有工具: 所有selectedTools边active → 所有Tool都应该运行 + * 2. Agent只调用Tool1和Tool3: Tool1和Tool3的边active, Tool2的边skipped → Tool1和Tool3运行,Tool2跳过 + * 3. 
所有工具执行完成: 所有Tool→End边active, Agent→End边active → End应该运行 + */ + + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('Agent', FlowNodeTypeEnum.toolCall), + createNode('Tool1', FlowNodeTypeEnum.httpRequest468), + createNode('Tool2', FlowNodeTypeEnum.httpRequest468), + createNode('Tool3', FlowNodeTypeEnum.httpRequest468), + createNode('End', FlowNodeTypeEnum.chatNode) + ]; + + const edges = [ + createEdge('start', 'Agent'), + createEdge('Agent', 'Tool1', 'waiting', 'Agent-source-selectedTools', 'Tool1-target-left'), + createEdge('Agent', 'Tool2', 'waiting', 'Agent-source-selectedTools', 'Tool2-target-left'), + createEdge('Agent', 'Tool3', 'waiting', 'Agent-source-selectedTools', 'Tool3-target-left'), + createEdge('Agent', 'End', 'waiting', 'Agent-source-right', 'End-target-left'), + createEdge('Tool1', 'End'), + createEdge('Tool2', 'End'), + createEdge('Tool3', 'End') + ]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('Agent调用所有工具: 所有Tool都应该运行', () => { + // Agent决定调用所有工具 + setEdgeStatus(edges, 'Agent', 'Tool1', 'active'); + setEdgeStatus(edges, 'Agent', 'Tool2', 'active'); + setEdgeStatus(edges, 'Agent', 'Tool3', 'active'); + setEdgeStatus(edges, 'Agent', 'End', 'waiting'); + + // 验证所有Tool节点状态 + expect( + WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'Tool1')!, + nodeEdgeGroupsMap: edgeGroupsMap + }) + ).toBe('run'); + expect( + WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'Tool2')!, + nodeEdgeGroupsMap: edgeGroupsMap + }) + ).toBe('run'); + expect( + WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'Tool3')!, + nodeEdgeGroupsMap: edgeGroupsMap + }) + ).toBe('run'); + + // 验证End节点状态(还在等待) + expect( + WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'End')!, + nodeEdgeGroupsMap: edgeGroupsMap + }) + 
).toBe('wait'); + }); + + it('Agent只调用Tool1和Tool3: Tool1和Tool3运行,Tool2跳过', () => { + // Agent只调用Tool1和Tool3 + setEdgeStatus(edges, 'Agent', 'Tool1', 'active'); + setEdgeStatus(edges, 'Agent', 'Tool2', 'skipped'); + setEdgeStatus(edges, 'Agent', 'Tool3', 'active'); + setEdgeStatus(edges, 'Agent', 'End', 'waiting'); + setEdgeStatus(edges, 'Tool2', 'End', 'skipped'); + + // 验证Tool节点状态 + expect( + WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'Tool1')!, + nodeEdgeGroupsMap: edgeGroupsMap + }) + ).toBe('run'); + expect( + WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'Tool2')!, + nodeEdgeGroupsMap: edgeGroupsMap + }) + ).toBe('skip'); + expect( + WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'Tool3')!, + nodeEdgeGroupsMap: edgeGroupsMap + }) + ).toBe('run'); + }); + + it('所有工具执行完成: End应该运行', () => { + // 所有工具执行完成 + setEdgeStatus(edges, 'Agent', 'Tool1', 'active'); + setEdgeStatus(edges, 'Agent', 'Tool2', 'active'); + setEdgeStatus(edges, 'Agent', 'Tool3', 'active'); + setEdgeStatus(edges, 'Agent', 'End', 'active'); + setEdgeStatus(edges, 'Tool1', 'End', 'active'); + setEdgeStatus(edges, 'Tool2', 'End', 'active'); + setEdgeStatus(edges, 'Tool3', 'End', 'active'); + + // 验证End节点状态 + expect( + WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'End')!, + nodeEdgeGroupsMap: edgeGroupsMap + }) + ).toBe('run'); + }); +}); + +describe('场景17: 工具调用 - 嵌套工具调用场景', () => { + /** + * 工作流结构: + * + * ┌──selectedTools──→ SubTool1 ──┐ + * start → Agent1 ──selectedTools──→ Agent2 ├──→ End + * │ └──────────────────────────────┘ + * └────────────────────────────────────────────────────────────→ End + * + * 预期分组: + * - Agent2: 组1[Agent1→Agent2 (selectedTools)] + * - SubTool1: 组1[Agent2→SubTool1 (selectedTools)] + * - End: 组1[Agent1→End], 组2[Agent2→End], 组3[SubTool1→End] + * + * 测试场景: + * 1. Agent1调用Agent2: Agent1→Agent2边active → Agent2应该运行 + * 2. 
Agent2调用SubTool1: Agent2→SubTool1边active → SubTool1应该运行 + * 3. Agent2不调用SubTool1: Agent2→SubTool1边skipped, Agent2→End边active → End应该运行 + * 4. 所有工具执行完成: 所有边active → End应该运行 + */ + + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('Agent1', FlowNodeTypeEnum.toolCall), + createNode('Agent2', FlowNodeTypeEnum.toolCall), + createNode('SubTool1', FlowNodeTypeEnum.httpRequest468), + createNode('End', FlowNodeTypeEnum.chatNode) + ]; + + const edges = [ + createEdge('start', 'Agent1'), + createEdge('Agent1', 'Agent2', 'waiting', 'Agent1-source-selectedTools', 'Agent2-target-left'), + createEdge('Agent1', 'End', 'waiting', 'Agent1-source-right', 'End-target-left'), + createEdge( + 'Agent2', + 'SubTool1', + 'waiting', + 'Agent2-source-selectedTools', + 'SubTool1-target-left' + ), + createEdge('Agent2', 'End', 'waiting', 'Agent2-source-right', 'End-target-left'), + createEdge('SubTool1', 'End') + ]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('Agent1调用Agent2: Agent2应该运行', () => { + // Agent1调用Agent2 + setEdgeStatus(edges, 'Agent1', 'Agent2', 'active'); + setEdgeStatus(edges, 'Agent1', 'End', 'waiting'); + + // 验证Agent2节点状态 + expect( + WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'Agent2')!, + nodeEdgeGroupsMap: edgeGroupsMap + }) + ).toBe('run'); + }); + + it('Agent2调用SubTool1: SubTool1应该运行', () => { + // Agent1调用Agent2,Agent2调用SubTool1 + setEdgeStatus(edges, 'Agent1', 'Agent2', 'active'); + setEdgeStatus(edges, 'Agent1', 'End', 'waiting'); + setEdgeStatus(edges, 'Agent2', 'SubTool1', 'active'); + setEdgeStatus(edges, 'Agent2', 'End', 'waiting'); + + // 验证SubTool1节点状态 + expect( + WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'SubTool1')!, + nodeEdgeGroupsMap: edgeGroupsMap + }) + ).toBe('run'); + }); + + it('Agent2不调用SubTool1: End应该运行', () => { + 
// Agent1调用Agent2,Agent2不调用SubTool1 + setEdgeStatus(edges, 'Agent1', 'Agent2', 'active'); + setEdgeStatus(edges, 'Agent1', 'End', 'active'); + setEdgeStatus(edges, 'Agent2', 'SubTool1', 'skipped'); + setEdgeStatus(edges, 'Agent2', 'End', 'active'); + setEdgeStatus(edges, 'SubTool1', 'End', 'skipped'); + + // 验证End节点状态 + expect( + WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'End')!, + nodeEdgeGroupsMap: edgeGroupsMap + }) + ).toBe('run'); + }); + + it('所有工具执行完成: End应该运行', () => { + // 所有工具执行完成 + setEdgeStatus(edges, 'Agent1', 'Agent2', 'active'); + setEdgeStatus(edges, 'Agent1', 'End', 'active'); + setEdgeStatus(edges, 'Agent2', 'SubTool1', 'active'); + setEdgeStatus(edges, 'Agent2', 'End', 'active'); + setEdgeStatus(edges, 'SubTool1', 'End', 'active'); + + // 验证End节点状态 + expect( + WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'End')!, + nodeEdgeGroupsMap: edgeGroupsMap + }) + ).toBe('run'); + }); +}); + +describe('场景18: 工具调用 - 工具与分支结合场景', () => { + /** + * 工作流结构: + * + * ┌──selectedTools──→ Tool1 ──┐ + * start → Agent ─┤ ├──→ IfElse ──if──→ End1 + * └──────────────────────────→ ┘ │ + * └─else─→ End2 + * + * 预期分组: + * - Tool1: 组1[Agent→Tool1 (selectedTools)] + * - IfElse: 组1[Agent→IfElse], 组2[Tool1→IfElse] + * - End1: 组1[IfElse→End1 (if handle)] + * - End2: 组1[IfElse→End2 (else handle)] + * + * 测试场景: + * 1. Agent调用Tool1,Tool1执行完成,IfElse走if分支 → End1应该运行 + * 2. 
Agent不调用Tool1,IfElse走else分支 → End2应该运行 + */ + + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('Agent', FlowNodeTypeEnum.toolCall), + createNode('Tool1', FlowNodeTypeEnum.httpRequest468), + createNode('IfElse', FlowNodeTypeEnum.ifElseNode), + createNode('End1', FlowNodeTypeEnum.chatNode), + createNode('End2', FlowNodeTypeEnum.chatNode) + ]; + + const edges = [ + createEdge('start', 'Agent'), + createEdge('Agent', 'Tool1', 'waiting', 'Agent-source-selectedTools', 'Tool1-target-left'), + createEdge('Agent', 'IfElse', 'waiting', 'Agent-source-right', 'IfElse-target-left'), + createEdge('Tool1', 'IfElse'), + createEdge('IfElse', 'End1', 'waiting', 'IfElse-source-if', 'End1-target-left'), + createEdge('IfElse', 'End2', 'waiting', 'IfElse-source-else', 'End2-target-left') + ]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('Agent调用Tool1,IfElse走if分支: End1应该运行', () => { + // Agent调用Tool1,Tool1执行完成 + setEdgeStatus(edges, 'Agent', 'Tool1', 'active'); + setEdgeStatus(edges, 'Agent', 'IfElse', 'active'); + setEdgeStatus(edges, 'Tool1', 'IfElse', 'active'); + + // IfElse走if分支 + setEdgeStatus(edges, 'IfElse', 'End1', 'active'); + setEdgeStatus(edges, 'IfElse', 'End2', 'skipped'); + + // 验证End1节点状态 + expect( + WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'End1')!, + nodeEdgeGroupsMap: edgeGroupsMap + }) + ).toBe('run'); + + // 验证End2节点状态 + expect( + WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'End2')!, + nodeEdgeGroupsMap: edgeGroupsMap + }) + ).toBe('skip'); + }); + + it('Agent不调用Tool1,IfElse走else分支: End2应该运行', () => { + // Agent不调用Tool1 + setEdgeStatus(edges, 'Agent', 'Tool1', 'skipped'); + setEdgeStatus(edges, 'Agent', 'IfElse', 'active'); + setEdgeStatus(edges, 'Tool1', 'IfElse', 'skipped'); + + // IfElse走else分支 + setEdgeStatus(edges, 'IfElse', 
'End1', 'skipped'); + setEdgeStatus(edges, 'IfElse', 'End2', 'active'); + + // 验证End1节点状态 + expect( + WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'End1')!, + nodeEdgeGroupsMap: edgeGroupsMap + }) + ).toBe('skip'); + + // 验证End2节点状态 + expect( + WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'End2')!, + nodeEdgeGroupsMap: edgeGroupsMap + }) + ).toBe('run'); + }); +}); + +describe('场景19: 工具调用 - 工具调用与循环结合', () => { + /** + * 工作流结构: + * + * ┌──selectedTools──→ Tool1 ──┐ + * start → Agent ─┤ ├──→ IfElse ──if──→ End + * └──────────────────────────→ ┘ │ + * └─else─→ Agent (循环) + * + * 预期分组: + * - Agent: 组1[start→Agent], 组2[IfElse→Agent (循环边)] + * - Tool1: 组1[Agent→Tool1 (selectedTools)] + * - IfElse: 组1[Agent→IfElse], 组2[Tool1→IfElse] + * - End: 组1[IfElse→End (if handle)] + * + * 测试场景: + * 1. 第一次执行:Agent 调用 Tool1 → Tool1 应该运行 + * 2. 循环执行:IfElse 走 else 分支回到 Agent → Agent 应该运行 + * 3. 循环中再次调用工具:验证 Tool1 可以再次运行 + * 4. 循环中不调用工具:直接走到 IfElse + */ + + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('Agent', FlowNodeTypeEnum.toolCall), + createNode('Tool1', FlowNodeTypeEnum.httpRequest468), + createNode('IfElse', FlowNodeTypeEnum.ifElseNode), + createNode('End', FlowNodeTypeEnum.chatNode) + ]; + + const edges = [ + createEdge('start', 'Agent'), + createEdge('Agent', 'Tool1', 'waiting', 'Agent-source-selectedTools', 'Tool1-target-left'), + createEdge('Agent', 'IfElse', 'waiting', 'Agent-source-right', 'IfElse-target-left'), + createEdge('Tool1', 'IfElse'), + createEdge('IfElse', 'End', 'waiting', 'IfElse-source-if', 'End-target-left'), + createEdge('IfElse', 'Agent', 'waiting', 'IfElse-source-else', 'Agent-target-left') + ]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('Agent 节点应该分成 2 组', () => { + const groups = edgeGroupsMap.get('Agent') || []; + 
expect(groups.length).toBe(2); + }); + + it('第一次执行:Agent 调用 Tool1,Tool1 应该运行', () => { + setEdgeStatus(edges, 'start', 'Agent', 'active'); + setEdgeStatus(edges, 'IfElse', 'Agent', 'waiting'); + setEdgeStatus(edges, 'Agent', 'Tool1', 'active'); + setEdgeStatus(edges, 'Agent', 'IfElse', 'waiting'); + + const statusTool1 = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'Tool1')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + expect(statusTool1).toBe('run'); + }); + + it('循环执行:IfElse 走 else 分支回到 Agent,Agent 应该运行', () => { + setEdgeStatus(edges, 'start', 'Agent', 'skipped'); + setEdgeStatus(edges, 'IfElse', 'Agent', 'active'); + setEdgeStatus(edges, 'IfElse', 'End', 'skipped'); + + const statusAgent = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'Agent')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + expect(statusAgent).toBe('run'); + }); + + it('循环中再次调用工具:Tool1 应该运行', () => { + setEdgeStatus(edges, 'start', 'Agent', 'skipped'); + setEdgeStatus(edges, 'IfElse', 'Agent', 'active'); + setEdgeStatus(edges, 'Agent', 'Tool1', 'active'); + setEdgeStatus(edges, 'Agent', 'IfElse', 'waiting'); + + const statusTool1 = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'Tool1')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + expect(statusTool1).toBe('run'); + }); + + it('循环中不调用工具:IfElse 应该运行', () => { + setEdgeStatus(edges, 'start', 'Agent', 'skipped'); + setEdgeStatus(edges, 'IfElse', 'Agent', 'active'); + setEdgeStatus(edges, 'Agent', 'Tool1', 'skipped'); + setEdgeStatus(edges, 'Agent', 'IfElse', 'active'); + setEdgeStatus(edges, 'Tool1', 'IfElse', 'skipped'); + + const statusIfElse = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'IfElse')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + expect(statusIfElse).toBe('run'); + }); +}); + +describe('场景20: 工具调用 - 多 Agent 并行调用工具后汇聚', () => { + /** + * 工作流结构: + * + * ┌──→ Agent1 ──selectedTools──→ Tool1 ──┐ + * start ┤ ├──→ End + * └──→ Agent2 
──selectedTools──→ Tool2 ──┘ + * + * 预期分组: + * - Agent1: 组1[start→Agent1] + * - Agent2: 组1[start→Agent2] + * - Tool1: 组1[Agent1→Tool1 (selectedTools)] + * - Tool2: 组1[Agent2→Tool2 (selectedTools)] + * - End: 组1[Agent1→End, Tool1→End, Agent2→End, Tool2→End] (并行汇聚,所有边在同一组) + * + * 测试场景: + * 1. 两个 Agent 都调用工具:End 应该等待所有工具完成 + * 2. Agent1 调用工具,Agent2 不调用:End 应该等待 Tool1 完成 + * 3. 都不调用工具:End 应该直接运行 + * 4. 所有工具执行完成:End 应该运行 + */ + + const nodes = [ + createNode('start', FlowNodeTypeEnum.workflowStart), + createNode('Agent1', FlowNodeTypeEnum.toolCall), + createNode('Agent2', FlowNodeTypeEnum.toolCall), + createNode('Tool1', FlowNodeTypeEnum.httpRequest468), + createNode('Tool2', FlowNodeTypeEnum.httpRequest468), + createNode('End', FlowNodeTypeEnum.chatNode) + ]; + + const edges = [ + createEdge('start', 'Agent1'), + createEdge('start', 'Agent2'), + createEdge('Agent1', 'Tool1', 'waiting', 'Agent1-source-selectedTools', 'Tool1-target-left'), + createEdge('Agent1', 'End', 'waiting', 'Agent1-source-right', 'End-target-left'), + createEdge('Agent2', 'Tool2', 'waiting', 'Agent2-source-selectedTools', 'Tool2-target-left'), + createEdge('Agent2', 'End', 'waiting', 'Agent2-source-right', 'End-target-left'), + createEdge('Tool1', 'End'), + createEdge('Tool2', 'End') + ]; + + const edgeIndex = WorkflowQueue.buildEdgeIndex({ runtimeEdges: edges }); + const edgeGroupsMap = WorkflowQueue.buildNodeEdgeGroupsMap({ + runtimeNodes: nodes, + edgeIndex + }); + + it('End 节点应该只有 1 组(并行汇聚)', () => { + const groups = edgeGroupsMap.get('End') || []; + expect(groups.length).toBe(1); + }); + + it('两个 Agent 都调用工具:End 应该等待', () => { + setEdgeStatus(edges, 'Agent1', 'Tool1', 'active'); + setEdgeStatus(edges, 'Agent1', 'End', 'waiting'); + setEdgeStatus(edges, 'Agent2', 'Tool2', 'active'); + setEdgeStatus(edges, 'Agent2', 'End', 'waiting'); + setEdgeStatus(edges, 'Tool1', 'End', 'waiting'); + setEdgeStatus(edges, 'Tool2', 'End', 'waiting'); + + const statusEnd = WorkflowQueue.getNodeRunStatus({ + 
node: nodes.find((n) => n.nodeId === 'End')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + expect(statusEnd).toBe('wait'); + }); + + it('Agent1 调用工具,Agent2 不调用:End 应该等待 Tool1', () => { + setEdgeStatus(edges, 'Agent1', 'Tool1', 'active'); + setEdgeStatus(edges, 'Agent1', 'End', 'waiting'); + setEdgeStatus(edges, 'Agent2', 'Tool2', 'skipped'); + setEdgeStatus(edges, 'Agent2', 'End', 'active'); + setEdgeStatus(edges, 'Tool1', 'End', 'waiting'); + setEdgeStatus(edges, 'Tool2', 'End', 'skipped'); + + const statusEnd = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'End')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + expect(statusEnd).toBe('wait'); + }); + + it('都不调用工具:End 应该运行', () => { + setEdgeStatus(edges, 'Agent1', 'Tool1', 'skipped'); + setEdgeStatus(edges, 'Agent1', 'End', 'active'); + setEdgeStatus(edges, 'Agent2', 'Tool2', 'skipped'); + setEdgeStatus(edges, 'Agent2', 'End', 'active'); + setEdgeStatus(edges, 'Tool1', 'End', 'skipped'); + setEdgeStatus(edges, 'Tool2', 'End', 'skipped'); + + const statusEnd = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'End')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + expect(statusEnd).toBe('run'); + }); + + it('所有工具执行完成:End 应该运行', () => { + setEdgeStatus(edges, 'Agent1', 'Tool1', 'active'); + setEdgeStatus(edges, 'Agent1', 'End', 'active'); + setEdgeStatus(edges, 'Agent2', 'Tool2', 'active'); + setEdgeStatus(edges, 'Agent2', 'End', 'active'); + setEdgeStatus(edges, 'Tool1', 'End', 'active'); + setEdgeStatus(edges, 'Tool2', 'End', 'active'); + + const statusEnd = WorkflowQueue.getNodeRunStatus({ + node: nodes.find((n) => n.nodeId === 'End')!, + nodeEdgeGroupsMap: edgeGroupsMap + }); + expect(statusEnd).toBe('run'); + }); +}); diff --git a/test/cases/global/core/workflow/dispatch/index.test.ts b/test/cases/service/core/workflow/dispatch/index.test.ts similarity index 100% rename from test/cases/global/core/workflow/dispatch/index.test.ts rename to 
test/cases/service/core/workflow/dispatch/index.test.ts diff --git a/test/cases/global/core/workflow/utils.ts b/test/cases/service/core/workflow/utils.ts similarity index 76% rename from test/cases/global/core/workflow/utils.ts rename to test/cases/service/core/workflow/utils.ts index f4a00f4770..0fd79f941e 100644 --- a/test/cases/global/core/workflow/utils.ts +++ b/test/cases/service/core/workflow/utils.ts @@ -31,3 +31,15 @@ export const createEdge = ( sourceHandle: sourceHandle || `${source}-source-right`, targetHandle: targetHandle || `${target}-target-left` }); + +export const setEdgeStatus = ( + edges: RuntimeEdgeItemType[], + source: string, + target: string, + status: 'active' | 'waiting' | 'skipped' +) => { + const edge = edges.find((e) => e.source === source && e.target === target); + if (edge) { + edge.status = status; + } +}; diff --git a/test/cases/service/core/workflow/utils/tarjan.test.ts b/test/cases/service/core/workflow/utils/tarjan.test.ts new file mode 100644 index 0000000000..70558aa7bc --- /dev/null +++ b/test/cases/service/core/workflow/utils/tarjan.test.ts @@ -0,0 +1,438 @@ +import { describe, it, expect } from 'vitest'; +import type { RuntimeNodeItemType } from '@fastgpt/global/core/workflow/runtime/type'; +import type { RuntimeEdgeItemType } from '@fastgpt/global/core/workflow/type/edge'; +import { + findSCCs, + isNodeInCycle, + classifyEdgesByDFS, + getEdgeType +} from '@fastgpt/service/core/workflow/utils/tarjan'; +import type { EdgeIndex } from '@fastgpt/service/core/workflow/utils/tarjan'; + +// --- 辅助函数 --- + +function makeNode(nodeId: string): RuntimeNodeItemType { + return { nodeId } as RuntimeNodeItemType; +} + +function makeEdge(source: string, target: string, sourceHandle?: string): RuntimeEdgeItemType { + return { + source, + sourceHandle: sourceHandle ?? 
`${source}-output`, + target, + targetHandle: `${target}-input`, + status: 'active' + }; +} + +function buildEdgeIndex(edges: RuntimeEdgeItemType[]): EdgeIndex { + const bySource = new Map(); + const byTarget = new Map(); + + for (const edge of edges) { + if (!bySource.has(edge.source)) bySource.set(edge.source, []); + bySource.get(edge.source)!.push(edge); + + if (!byTarget.has(edge.target)) byTarget.set(edge.target, []); + byTarget.get(edge.target)!.push(edge); + } + + return { bySource, byTarget }; +} + +// =========================== +// findSCCs +// =========================== + +describe('findSCCs', () => { + it('单个节点无边 → 自身为独立 SCC,大小为 1', () => { + const nodes = [makeNode('A')]; + const edgeIndex = buildEdgeIndex([]); + const { nodeToSCC, sccSizes } = findSCCs(nodes, edgeIndex); + + expect(nodeToSCC.has('A')).toBe(true); + const sccId = nodeToSCC.get('A')!; + expect(sccSizes.get(sccId)).toBe(1); + }); + + it('两节点无边 → 两个独立 SCC', () => { + const nodes = [makeNode('A'), makeNode('B')]; + const edgeIndex = buildEdgeIndex([]); + const { nodeToSCC, sccSizes } = findSCCs(nodes, edgeIndex); + + expect(nodeToSCC.get('A')).not.toBe(nodeToSCC.get('B')); + expect(sccSizes.get(nodeToSCC.get('A')!)).toBe(1); + expect(sccSizes.get(nodeToSCC.get('B')!)).toBe(1); + }); + + it('线性链 A→B→C → 三个独立 SCC,无循环', () => { + const nodes = [makeNode('A'), makeNode('B'), makeNode('C')]; + const edges = [makeEdge('A', 'B'), makeEdge('B', 'C')]; + const edgeIndex = buildEdgeIndex(edges); + const { nodeToSCC, sccSizes } = findSCCs(nodes, edgeIndex); + + // 各节点属于不同 SCC + expect(nodeToSCC.get('A')).not.toBe(nodeToSCC.get('B')); + expect(nodeToSCC.get('B')).not.toBe(nodeToSCC.get('C')); + + sccSizes.forEach((size) => { + expect(size).toBe(1); + }); + }); + + it('简单二节点循环 A→B→A → 同一个 SCC,大小为 2', () => { + const nodes = [makeNode('A'), makeNode('B')]; + const edges = [makeEdge('A', 'B'), makeEdge('B', 'A')]; + const edgeIndex = buildEdgeIndex(edges); + const { nodeToSCC, sccSizes } = 
findSCCs(nodes, edgeIndex);
+
+    expect(nodeToSCC.get('A')).toBe(nodeToSCC.get('B'));
+    const sccId = nodeToSCC.get('A')!;
+    expect(sccSizes.get(sccId)).toBe(2);
+  });
+
+  it('三节点循环 A→B→C→A → 同一个 SCC,大小为 3', () => {
+    const nodes = [makeNode('A'), makeNode('B'), makeNode('C')];
+    const edges = [makeEdge('A', 'B'), makeEdge('B', 'C'), makeEdge('C', 'A')];
+    const edgeIndex = buildEdgeIndex(edges);
+    const { nodeToSCC, sccSizes } = findSCCs(nodes, edgeIndex);
+
+    expect(nodeToSCC.get('A')).toBe(nodeToSCC.get('B'));
+    expect(nodeToSCC.get('B')).toBe(nodeToSCC.get('C'));
+    const sccId = nodeToSCC.get('A')!;
+    expect(sccSizes.get(sccId)).toBe(3);
+  });
+
+  it('自环节点 A→A → 自身 SCC,大小为 1(Tarjan 标准行为:自环不影响 SCC 大小)', () => {
+    const nodes = [makeNode('A')];
+    const edges = [makeEdge('A', 'A')];
+    const edgeIndex = buildEdgeIndex(edges);
+    const { nodeToSCC, sccSizes } = findSCCs(nodes, edgeIndex);
+
+    expect(nodeToSCC.has('A')).toBe(true);
+    const sccId = nodeToSCC.get('A')!;
+    // With a self-loop, lowLink == discoveryTime, so the SCC contains only the node itself (size 1)
+    expect(sccSizes.get(sccId)).toBe(1);
+  });
+
+  it('混合图:一个循环 + 一个独立节点', () => {
+    const nodes = [makeNode('A'), makeNode('B'), makeNode('D')];
+    const edges = [makeEdge('A', 'B'), makeEdge('B', 'A')]; // D is isolated
+    const edgeIndex = buildEdgeIndex(edges);
+    const { nodeToSCC, sccSizes } = findSCCs(nodes, edgeIndex);
+
+    // A and B belong to the same SCC
+    expect(nodeToSCC.get('A')).toBe(nodeToSCC.get('B'));
+    expect(sccSizes.get(nodeToSCC.get('A')!)).toBe(2);
+
+    // D is on its own
+    expect(nodeToSCC.get('D')).not.toBe(nodeToSCC.get('A'));
+    expect(sccSizes.get(nodeToSCC.get('D')!)).toBe(1);
+  });
+
+  it('空节点列表 → 空结果', () => {
+    const { nodeToSCC, sccSizes } = findSCCs([], buildEdgeIndex([]));
+    expect(nodeToSCC.size).toBe(0);
+    expect(sccSizes.size).toBe(0);
+  });
+});
+
+// ===========================
+// isNodeInCycle
+// ===========================
+
+describe('isNodeInCycle', () => {
+  it('SCC 大小 > 1 → 节点在循环中', () => {
+    const nodeToSCC = new Map([
+      ['A', 0],
+      ['B', 0]
+    ]);
+    const sccSizes = new Map([[0, 2]]);
+    expect(isNodeInCycle('A', nodeToSCC, sccSizes)).toBe(true);
+    expect(isNodeInCycle('B', nodeToSCC, sccSizes)).toBe(true);
+  });
+
+  it('SCC 大小 == 1 → 节点不在循环中', () => {
+    const nodeToSCC = new Map([['A', 0]]);
+    const sccSizes = new Map([[0, 1]]);
+    expect(isNodeInCycle('A', nodeToSCC, sccSizes)).toBe(false);
+  });
+
+  it('节点不在 nodeToSCC 中 → 返回 false', () => {
+    const nodeToSCC = new Map();
+    const sccSizes = new Map();
+    expect(isNodeInCycle('X', nodeToSCC, sccSizes)).toBe(false);
+  });
+
+  it('sccSizes 中无对应 SCC ID → 视为大小 0,返回 false', () => {
+    const nodeToSCC = new Map([['A', 99]]);
+    const sccSizes = new Map(); // no entry for sccId=99
+    expect(isNodeInCycle('A', nodeToSCC, sccSizes)).toBe(false);
+  });
+});
+
+// ===========================
+// classifyEdgesByDFS
+// ===========================
+
+describe('classifyEdgesByDFS', () => {
+  it('线性链 A→B→C → 全部是树边', () => {
+    const nodes = [makeNode('A'), makeNode('B'), makeNode('C')];
+    const edgeAB = makeEdge('A', 'B');
+    const edgeBC = makeEdge('B', 'C');
+    const edgeIndex = buildEdgeIndex([edgeAB, edgeBC]);
+    const edgeTypes = classifyEdgesByDFS(nodes, edgeIndex);
+
+    const keyAB = `${edgeAB.source}-${edgeAB.target}-${edgeAB.sourceHandle}`;
+    const keyBC = `${edgeBC.source}-${edgeBC.target}-${edgeBC.sourceHandle}`;
+    expect(edgeTypes.get(keyAB)).toBe('tree');
+    expect(edgeTypes.get(keyBC)).toBe('tree');
+  });
+
+  it('有回边时 → 回边被标记为 back', () => {
+    // A → B → C → B (back edge)
+    const nodes = [makeNode('A'), makeNode('B'), makeNode('C')];
+    const edgeAB = makeEdge('A', 'B');
+    const edgeBC = makeEdge('B', 'C');
+    const edgeCB = makeEdge('C', 'B'); // back edge
+    const edgeIndex = buildEdgeIndex([edgeAB, edgeBC, edgeCB]);
+    const edgeTypes = classifyEdgesByDFS(nodes, edgeIndex);
+
+    const keyCB = `${edgeCB.source}-${edgeCB.target}-${edgeCB.sourceHandle}`;
+    expect(edgeTypes.get(keyCB)).toBe('back');
+  });
+
+  it('从入口节点到已完成后代的非树边 → 前向边', () => {
+    // A → B, A → C, B → C (A→C is the forward edge)
+    const nodes = [makeNode('A'), makeNode('B'), makeNode('C')];
+    const edgeAB = makeEdge('A', 'B', 'h1');
+    const edgeBC = makeEdge('B', 'C', 'h1');
+    const edgeAC = makeEdge('A', 'C', 'h2'); // forward edge
+    const edgeIndex = buildEdgeIndex([edgeAB, edgeBC, edgeAC]);
+    const edgeTypes = classifyEdgesByDFS(nodes, edgeIndex);
+
+    const keyAC = `${edgeAC.source}-${edgeAC.target}-${edgeAC.sourceHandle}`;
+    // A→C should be a forward edge (A's discovery time precedes C's)
+    expect(edgeTypes.get(keyAC)).toBe('forward');
+  });
+
+  it('跨边 → 两棵子树之间的边', () => {
+    // Two independent chains, plus one bridging edge
+    // A → B (tree edge)
+    // C → D (tree edge)
+    // B → D (when the C→D path is visited first, D is already finished, making B→D cross)
+    const nodes = [makeNode('A'), makeNode('C'), makeNode('B'), makeNode('D')];
+    const edgeAB = makeEdge('A', 'B');
+    const edgeCD = makeEdge('C', 'D');
+    const edgeBD = makeEdge('B', 'D'); // cross edge (DFS from A walks A→B→D first, then C→D is cross)
+    // Note: whether it ends up forward or cross depends on DFS order; the key point is there is no back edge
+    const edgeIndex = buildEdgeIndex([edgeAB, edgeCD, edgeBD]);
+    const edgeTypes = classifyEdgesByDFS(nodes, edgeIndex);
+
+    // Confirm no back edge exists (the graph is acyclic)
+    for (const type of edgeTypes.values()) {
+      expect(type).not.toBe('back');
+    }
+  });
+
+  it('孤立节点(无入边无出边) → 不产生任何边类型', () => {
+    const nodes = [makeNode('A'), makeNode('B')]; // no edges
+    const edgeIndex = buildEdgeIndex([]);
+    const edgeTypes = classifyEdgesByDFS(nodes, edgeIndex);
+    expect(edgeTypes.size).toBe(0);
+  });
+
+  it('多个入口节点 → 各自的链均被 DFS 访问', () => {
+    // A → B, C → D (two independent chains)
+    const nodes = [makeNode('A'), makeNode('B'), makeNode('C'), makeNode('D')];
+    const edgeAB = makeEdge('A', 'B');
+    const edgeCD = makeEdge('C', 'D');
+    const edgeIndex = buildEdgeIndex([edgeAB, edgeCD]);
+    const edgeTypes = classifyEdgesByDFS(nodes, edgeIndex);
+
+    const keyAB = `${edgeAB.source}-${edgeAB.target}-${edgeAB.sourceHandle}`;
+    const keyCD = `${edgeCD.source}-${edgeCD.target}-${edgeCD.sourceHandle}`;
+    expect(edgeTypes.get(keyAB)).toBe('tree');
+    expect(edgeTypes.get(keyCD)).toBe('tree');
+  });
+
+  it('sourceHandle 为 undefined → 使用 "default" 作为 key', () => {
+    const nodes = [makeNode('A'), makeNode('B')];
+    const edge: RuntimeEdgeItemType = {
+      source: 'A',
+      sourceHandle: undefined as any, // simulate undefined
+      target: 'B',
+      targetHandle: 'B-input',
+      status: 'active'
+    };
+    const edgeIndex = buildEdgeIndex([edge]);
+    const edgeTypes = classifyEdgesByDFS(nodes, edgeIndex);
+
+    const keyWithDefault = 'A-B-default';
+    expect(edgeTypes.get(keyWithDefault)).toBe('tree');
+  });
+});
+
+// ===========================
+// getEdgeType
+// ===========================
+
+describe('getEdgeType', () => {
+  it('返回边对应的类型', () => {
+    const edge = makeEdge('A', 'B', 'h1');
+    const edgeKey = `A-B-h1`;
+    const edgeTypes = new Map([[edgeKey, 'tree' as const]]);
+    expect(getEdgeType(edge, edgeTypes)).toBe('tree');
+  });
+
+  it('边不在 map 中 → 返回 undefined', () => {
+    const edge = makeEdge('X', 'Y', 'hX');
+    const edgeTypes = new Map();
+    expect(getEdgeType(edge, edgeTypes)).toBeUndefined();
+  });
+
+  it('sourceHandle 为 undefined → key 使用 "default"', () => {
+    const edge: RuntimeEdgeItemType = {
+      source: 'A',
+      sourceHandle: undefined as any,
+      target: 'B',
+      targetHandle: 'B-input',
+      status: 'active'
+    };
+    const edgeKey = 'A-B-default';
+    const edgeTypes = new Map([[edgeKey, 'back' as const]]);
+    expect(getEdgeType(edge, edgeTypes)).toBe('back');
+  });
+
+  it('各种边类型均可正确返回', () => {
+    const edgeTypes = new Map([
+      ['A-B-h1', 'tree'],
+      ['B-A-h2', 'back'],
+      ['A-C-h3', 'forward'],
+      ['D-C-h4', 'cross']
+    ]);
+
+    expect(getEdgeType(makeEdge('A', 'B', 'h1'), edgeTypes)).toBe('tree');
+    expect(getEdgeType(makeEdge('B', 'A', 'h2'), edgeTypes)).toBe('back');
+    expect(getEdgeType(makeEdge('A', 'C', 'h3'), edgeTypes)).toBe('forward');
+    expect(getEdgeType(makeEdge('D', 'C', 'h4'), edgeTypes)).toBe('cross');
+  });
+});
+
+// ===========================
+// Boundary-safety checks
+// ===========================
+
+describe('安全边界检测', () => {
+  describe('findSCCs - 边界输入', () => {
+    it('边引用了不在节点列表中的 target → 不崩溃,已声明节点均有 SCC', () => {
+      // The node list contains only A, but edge A→Ghost targets "Ghost" which is not in the list
+      const nodes = [makeNode('A')];
+      const edges = [makeEdge('A', 'Ghost')];
+      const edgeIndex = buildEdgeIndex(edges);
+
+      // Must not throw
+      expect(() => findSCCs(nodes, edgeIndex)).not.toThrow();
+
+      const { nodeToSCC } = findSCCs(nodes, edgeIndex);
+      // The declared node A must still receive an SCC assignment
+      expect(nodeToSCC.has('A')).toBe(true);
+    });
+
+    it('边引用了不在节点列表中的 source(edgeIndex.bySource 含幽灵节点)→ 不崩溃', () => {
+      // The node list contains only B, but bySource holds an edge originating from "Ghost"
+      const nodes = [makeNode('B')];
+      const ghostEdge = makeEdge('Ghost', 'B');
+      const edgeIndex = buildEdgeIndex([ghostEdge]);
+
+      expect(() => findSCCs(nodes, edgeIndex)).not.toThrow();
+
+      const { nodeToSCC } = findSCCs(nodes, edgeIndex);
+      expect(nodeToSCC.has('B')).toBe(true);
+    });
+
+    it('重复节点 ID → 后续重复节点因 discoveryTime 已存在而被跳过,不崩溃', () => {
+      const nodes = [makeNode('A'), makeNode('A')]; // duplicate
+      const edgeIndex = buildEdgeIndex([]);
+
+      expect(() => findSCCs(nodes, edgeIndex)).not.toThrow();
+
+      const { nodeToSCC } = findSCCs(nodes, edgeIndex);
+      expect(nodeToSCC.has('A')).toBe(true);
+    });
+
+    it('边 source/target 为空字符串 → 不崩溃,空字符串节点被当作普通节点 ID', () => {
+      const nodes = [makeNode('A'), makeNode('')];
+      const edge = makeEdge('A', '');
+      const edgeIndex = buildEdgeIndex([edge]);
+
+      expect(() => findSCCs(nodes, edgeIndex)).not.toThrow();
+    });
+
+    it('较深的线性链(500 节点)→ 不应栈溢出', () => {
+      const SIZE = 500;
+      const nodes = Array.from({ length: SIZE }, (_, i) => makeNode(`N${i}`));
+      const edges = Array.from({ length: SIZE - 1 }, (_, i) => makeEdge(`N${i}`, `N${i + 1}`));
+      const edgeIndex = buildEdgeIndex(edges);
+
+      expect(() => findSCCs(nodes, edgeIndex)).not.toThrow();
+
+      const { nodeToSCC, sccSizes } = findSCCs(nodes, edgeIndex);
+      expect(nodeToSCC.size).toBe(SIZE);
+      // All SCCs are independent, each of size 1
+      sccSizes.forEach((size) => expect(size).toBe(1));
+    });
+  });
+
+  describe('classifyEdgesByDFS - 边界输入', () => {
+    it('边引用了不在节点列表中的 target → 不崩溃', () => {
+      const nodes = [makeNode('A')];
+      const edges = [makeEdge('A', 'Ghost')];
+      const edgeIndex = buildEdgeIndex(edges);
+
+      expect(() => classifyEdgesByDFS(nodes, edgeIndex)).not.toThrow();
+    });
+
+    it('边 source/target 为空字符串 → 不崩溃', () => {
+      const nodes = [makeNode(''), makeNode('A')];
+      const edge = makeEdge('', 'A');
+      const edgeIndex = buildEdgeIndex([edge]);
+
+      expect(() => classifyEdgesByDFS(nodes, edgeIndex)).not.toThrow();
+    });
+
+    it('较深的线性链(500 节点)→ 不应栈溢出', () => {
+      const SIZE = 500;
+      const nodes = Array.from({ length: SIZE }, (_, i) => makeNode(`N${i}`));
+      const edges = Array.from({ length: SIZE - 1 }, (_, i) => makeEdge(`N${i}`, `N${i + 1}`));
+      const edgeIndex = buildEdgeIndex(edges);
+
+      expect(() => classifyEdgesByDFS(nodes, edgeIndex)).not.toThrow();
+
+      const edgeTypes = classifyEdgesByDFS(nodes, edgeIndex);
+      // A linear chain yields nothing but tree edges
+      edgeTypes.forEach((type) => expect(type).toBe('tree'));
+    });
+
+    it('重复节点 ID → 不崩溃', () => {
+      const nodes = [makeNode('A'), makeNode('A'), makeNode('B')];
+      const edges = [makeEdge('A', 'B')];
+      const edgeIndex = buildEdgeIndex(edges);
+
+      expect(() => classifyEdgesByDFS(nodes, edgeIndex)).not.toThrow();
+    });
+  });
+
+  describe('isNodeInCycle - 边界输入', () => {
+    it('nodeId 为空字符串 → 正常查找,不崩溃', () => {
+      const nodeToSCC = new Map([['', 0]]);
+      const sccSizes = new Map([[0, 2]]);
+      expect(() => isNodeInCycle('', nodeToSCC, sccSizes)).not.toThrow();
+      expect(isNodeInCycle('', nodeToSCC, sccSizes)).toBe(true);
+    });
+
+    it('空的 map → 返回 false,不崩溃', () => {
+      expect(isNodeInCycle('anything', new Map(), new Map())).toBe(false);
+    });
+  });
+});