V4.9.7 feature (#4669)

* update doc

* feat: Add coupon redemption feature for team subscriptions (#4595)

* feat: Add coupon redemption feature for team subscriptions

- Introduced `TeamCouponSub` and `TeamCouponSchema` types
- Added `redeemCoupon` API endpoint
- Updated UI to include a modal for coupon redemption
- Added new icon and translations for "Redeem coupon"

* perf: remove field teamId

* perf: use dynamic import

* refactor: move to page component

* perf: coupon code

* perf: mcp server

* perf: test

* auto layout (#4634)

* fix 4.9.6 (#4631)

* fix debug quote list

* delete next text node match

* fix extract default boolean value

* export latest 100 chat items

* fix quote item ui

* doc

* fix doc

* feat: auto layout

* perf: auto layout

* fix: auto layout null

* add start node

---------

Co-authored-by: heheer <heheer@sealos.io>

* fix: share link (#4644)

* Add workflow run duration;Get audio duration (#4645)

* add duration

* get audio duration

* Custom config path (#4649)

* feat: 通过环境变量DATA_PATH获取配置文件目录 (#4622)

通过环境变量DATA_PATH获取配置文件目录,以应对不同的部署方式的多样化需求

* feat: custom configjson path

* doc

---------

Co-authored-by: John Chen <sss1991@163.com>

* 程序api调用场景下,如果大量调用带有图片或视频,产生的聊天记录会导致后台mongo数据库异常。这个修改给api客户端一个禁止生成聊天记录的选项,避免这个后果。 (#3964)

* update special chatId

* perf: vector db rename

* update operationLog (#4647)

* update operationLog

* combine operationLogMap

* solve operationI18nLogMap bug

* remove log

* feat: Rerank usage (#4654)

* refresh concat when update (#4655)

* fix: refresh code

* perf: timer lock

* Fix operationLog (#4657)

* perf: http streamable mcp

* add alipay (#4630)

* perf: subplan ui

* perf: pay code

* hide bank tip

* Fix: pay error (#4665)

* fix quote number (#4666)

* remove log

---------

Co-authored-by: a.e. <49438478+I-Info@users.noreply.github.com>
Co-authored-by: heheer <heheer@sealos.io>
Co-authored-by: John Chen <sss1991@163.com>
Co-authored-by: gaord <bengao168@msn.com>
Co-authored-by: gggaaallleee <91131304+gggaaallleee@users.noreply.github.com>
This commit is contained in:
Archer
2025-04-26 16:17:21 +08:00
committed by GitHub
parent a669a60fe6
commit 0720bbe4da
143 changed files with 2067 additions and 1093 deletions

View File

@@ -2,7 +2,6 @@ import fs from 'fs';
import { getAxiosConfig } from '../config';
import axios from 'axios';
import FormData from 'form-data';
import { getSTTModel } from '../model';
import { STTModelType } from '@fastgpt/global/core/ai/model.d';
export const aiTranscriptions = async ({
@@ -24,7 +23,7 @@ export const aiTranscriptions = async ({
const aiAxiosConfig = getAxiosConfig();
const { data: result } = await axios<{ text: string }>({
const { data: result } = await axios<{ text: string; usage?: { total_tokens: number } }>({
method: 'post',
...(modelData.requestUrl
? { url: modelData.requestUrl }

View File

@@ -3,6 +3,7 @@ import { POST } from '../../../common/api/serverRequest';
import { getDefaultRerankModel } from '../model';
import { getAxiosConfig } from '../config';
import { RerankModelItemType } from '@fastgpt/global/core/ai/model.d';
import { countPromptTokens } from '../../../common/string/tiktoken';
type PostReRankResponse = {
id: string;
@@ -10,8 +11,17 @@ type PostReRankResponse = {
index: number;
relevance_score: number;
}[];
meta?: {
tokens: {
input_tokens: number;
output_tokens: number;
};
};
};
type ReRankCallResult = {
results: { id: string; score?: number }[];
inputTokens: number;
};
type ReRankCallResult = { id: string; score?: number }[];
export function reRankRecall({
model = getDefaultRerankModel(),
@@ -28,18 +38,22 @@ export function reRankRecall({
return Promise.reject('no rerank model');
}
if (documents.length === 0) {
return Promise.resolve([]);
return Promise.resolve({
results: [],
inputTokens: 0
});
}
const { baseUrl, authorization } = getAxiosConfig();
let start = Date.now();
const documentsTextArray = documents.map((doc) => doc.text);
return POST<PostReRankResponse>(
model.requestUrl ? model.requestUrl : `${baseUrl}/rerank`,
{
model: model.model,
query,
documents: documents.map((doc) => doc.text)
documents: documentsTextArray
},
{
headers: {
@@ -49,17 +63,22 @@ export function reRankRecall({
timeout: 30000
}
)
.then((data) => {
.then(async (data) => {
addLog.info('ReRank finish:', { time: Date.now() - start });
if (!data?.results || data?.results?.length === 0) {
addLog.error('ReRank error, empty result', data);
}
return data?.results?.map((item) => ({
id: documents[item.index].id,
score: item.relevance_score
}));
return {
results: data?.results?.map((item) => ({
id: documents[item.index].id,
score: item.relevance_score
})),
inputTokens:
data?.meta?.tokens?.input_tokens ||
(await countPromptTokens(documentsTextArray.join('\n') + query, ''))
};
})
.catch((err) => {
addLog.error('rerank error', err);

View File

@@ -2,50 +2,38 @@ import { Client } from '@modelcontextprotocol/sdk/client/index.js';
import { SSEClientTransport } from '@modelcontextprotocol/sdk/client/sse.js';
import { StreamableHTTPClientTransport } from '@modelcontextprotocol/sdk/client/streamableHttp.js';
import { ToolType } from '@fastgpt/global/core/app/type';
import { addLog } from '../../common/system/log';
import { retryFn } from '@fastgpt/global/common/system/utils';
export class MCPClient {
private client: Client | null = null;
private client: Client;
private url: string;
constructor(config: { url: string }) {
this.url = config.url;
this.client = new Client({
name: 'FastGPT-MCP-client',
version: '1.0.0'
});
}
private async getConnection(): Promise<Client> {
if (this.client) {
return this.client;
}
try {
const client = new Client({
name: 'FastGPT-MCP-http-client',
version: '1.0.0'
});
const transport = new StreamableHTTPClientTransport(new URL(this.url));
await client.connect(transport);
this.client = client;
return client;
await this.client.connect(transport);
return this.client;
} catch (error) {
const client = new Client({
name: 'FastGPT-MCP-sse-client',
version: '1.0.0'
});
const sseTransport = new SSEClientTransport(new URL(this.url));
await client.connect(sseTransport);
this.client = client;
return client;
await this.client.connect(new SSEClientTransport(new URL(this.url)));
return this.client;
}
}
// 内部方法:关闭连接
private async closeConnection() {
if (this.client) {
try {
await this.client.close();
this.client = null;
} catch (error) {
console.error('Failed to close MCP client:', error);
}
try {
await retryFn(() => this.client.close(), 3);
} catch (error) {
addLog.error('[MCP Client] Failed to close connection:', error);
}
}
@@ -58,7 +46,11 @@ export class MCPClient {
const client = await this.getConnection();
const response = await client.listTools();
const tools = (response.tools || []).map((tool: any) => ({
if (!Array.isArray(response.tools)) {
return Promise.reject('[MCP Client] Get tools response is not an array');
}
const tools = response.tools.map((tool) => ({
name: tool.name,
description: tool.description || '',
inputSchema: tool.inputSchema || {
@@ -67,9 +59,10 @@ export class MCPClient {
}
}));
// @ts-ignore
return tools;
} catch (error) {
console.error('Failed to get MCP tools:', error);
addLog.error('[MCP Client] Failed to get tools:', error);
return Promise.reject(error);
} finally {
await this.closeConnection();
@@ -85,28 +78,17 @@ export class MCPClient {
public async toolCall(toolName: string, params: Record<string, any>): Promise<any> {
try {
const client = await this.getConnection();
console.log(`Call tool: ${toolName}`, params);
addLog.debug(`[MCP Client] Call tool: ${toolName}`, params);
const result = await client.callTool({
return await client.callTool({
name: toolName,
arguments: params
});
return result;
} catch (error) {
console.error(`Failed to call tool ${toolName}:`, error);
addLog.error(`[MCP Client] Failed to call tool ${toolName}:`, error);
return Promise.reject(error);
} finally {
await this.closeConnection();
}
}
}
/**
* Create MCP client
* @param config Client configuration, containing url
* @returns MCPClient instance
*/
export default function getMCPClient(config: { url: string }): MCPClient {
return new MCPClient(config);
}

View File

@@ -82,7 +82,8 @@ const ChatItemSchema = new Schema({
[DispatchNodeResponseKeyEnum.nodeResponse]: {
type: Array,
default: []
}
},
durationSeconds: Number
});
try {

View File

@@ -34,6 +34,7 @@ type Props = {
outLinkUid?: string;
content: [UserChatItemType & { dataId?: string }, AIChatItemType & { dataId?: string }];
metadata?: Record<string, any>;
durationSeconds: number; //s
};
export async function saveChat({
@@ -51,8 +52,11 @@ export async function saveChat({
shareId,
outLinkUid,
content,
durationSeconds,
metadata = {}
}: Props) {
if (!chatId || chatId === 'NO_RECORD_HISTORIES') return;
try {
const chat = await MongoChat.findOne(
{
@@ -78,34 +82,33 @@ export async function saveChat({
// Format save chat content: Remove quote q/a
const processedContent = content.map((item) => {
if (item.obj === ChatRoleEnum.AI) {
const nodeResponse = item[DispatchNodeResponseKeyEnum.nodeResponse];
const nodeResponse = item[DispatchNodeResponseKeyEnum.nodeResponse]?.map((responseItem) => {
if (
responseItem.moduleType === FlowNodeTypeEnum.datasetSearchNode &&
responseItem.quoteList
) {
return {
...responseItem,
quoteList: responseItem.quoteList.map((quote: any) => ({
id: quote.id,
chunkIndex: quote.chunkIndex,
datasetId: quote.datasetId,
collectionId: quote.collectionId,
sourceId: quote.sourceId,
sourceName: quote.sourceName,
score: quote.score,
tokens: quote.tokens
}))
};
}
return responseItem;
});
if (nodeResponse) {
return {
...item,
[DispatchNodeResponseKeyEnum.nodeResponse]: nodeResponse.map((responseItem) => {
if (
responseItem.moduleType === FlowNodeTypeEnum.datasetSearchNode &&
responseItem.quoteList
) {
return {
...responseItem,
quoteList: responseItem.quoteList.map((quote: any) => ({
id: quote.id,
chunkIndex: quote.chunkIndex,
datasetId: quote.datasetId,
collectionId: quote.collectionId,
sourceId: quote.sourceId,
sourceName: quote.sourceName,
score: quote.score,
tokens: quote.tokens
}))
};
}
return responseItem;
})
};
}
return {
...item,
[DispatchNodeResponseKeyEnum.nodeResponse]: nodeResponse,
durationSeconds
};
}
return item;
});
@@ -175,13 +178,15 @@ export const updateInteractiveChat = async ({
appId,
userInteractiveVal,
aiResponse,
newVariables
newVariables,
durationSeconds
}: {
chatId: string;
appId: string;
userInteractiveVal: string;
aiResponse: AIChatItemType & { dataId?: string };
newVariables?: Record<string, any>;
durationSeconds: number;
}) => {
if (!chatId) return;
@@ -246,6 +251,10 @@ export const updateInteractiveChat = async ({
chatItem.value = chatItem.value ? [...chatItem.value, ...aiResponse.value] : aiResponse.value;
}
chatItem.durationSeconds = chatItem.durationSeconds
? +(chatItem.durationSeconds + durationSeconds).toFixed(2)
: durationSeconds;
await mongoSessionRun(async (session) => {
await chatItem.save({ session });
await MongoChat.updateOne(

View File

@@ -9,7 +9,7 @@ import { DatasetCollectionSchemaType, DatasetSchemaType } from '@fastgpt/global/
import { MongoDatasetTraining } from '../training/schema';
import { MongoDatasetData } from '../data/schema';
import { delImgByRelatedId } from '../../../common/file/image/controller';
import { deleteDatasetDataVector } from '../../../common/vectorStore/controller';
import { deleteDatasetDataVector } from '../../../common/vectorDB/controller';
import { delFileByFileIdList } from '../../../common/file/gridfs/controller';
import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
import { ClientSession } from '../../../common/mongo';

View File

@@ -5,7 +5,7 @@ import { delCollectionRelatedSource } from './collection/controller';
import { ClientSession } from '../../common/mongo';
import { MongoDatasetTraining } from './training/schema';
import { MongoDatasetData } from './data/schema';
import { deleteDatasetDataVector } from '../../common/vectorStore/controller';
import { deleteDatasetDataVector } from '../../common/vectorDB/controller';
import { MongoDatasetDataText } from './data/dataTextSchema';
import { DatasetErrEnum } from '@fastgpt/global/common/error/code/dataset';
import { retryFn } from '@fastgpt/global/common/system/utils';

View File

@@ -3,7 +3,7 @@ import {
DatasetSearchModeMap,
SearchScoreTypeEnum
} from '@fastgpt/global/core/dataset/constants';
import { recallFromVectorStore } from '../../../common/vectorStore/controller';
import { recallFromVectorStore } from '../../../common/vectorDB/controller';
import { getVectorsByText } from '../../ai/embedding';
import { getEmbeddingModel, getDefaultRerankModel, getLLMModel } from '../../ai/model';
import { MongoDatasetData } from '../data/schema';
@@ -62,7 +62,8 @@ export type SearchDatasetDataProps = {
export type SearchDatasetDataResponse = {
searchRes: SearchDataResponseItemType[];
tokens: number;
embeddingTokens: number;
reRankInputTokens: number;
searchMode: `${DatasetSearchModeEnum}`;
limit: number;
similarity: number;
@@ -86,8 +87,11 @@ export const datasetDataReRank = async ({
rerankModel?: RerankModelItemType;
data: SearchDataResponseItemType[];
query: string;
}): Promise<SearchDataResponseItemType[]> => {
const results = await reRankRecall({
}): Promise<{
results: SearchDataResponseItemType[];
inputTokens: number;
}> => {
const { results, inputTokens } = await reRankRecall({
model: rerankModel,
query,
documents: data.map((item) => ({
@@ -114,7 +118,10 @@ export const datasetDataReRank = async ({
})
.filter(Boolean) as SearchDataResponseItemType[];
return mergeResult;
return {
results: mergeResult,
inputTokens
};
};
export const filterDatasetDataByMaxTokens = async (
data: SearchDataResponseItemType[],
@@ -694,14 +701,23 @@ export async function searchDatasetData(
const { embeddingLimit, fullTextLimit } = countRecallLimit();
// recall
const { embeddingRecallResults, fullTextRecallResults, tokens } = await multiQueryRecall({
const {
embeddingRecallResults,
fullTextRecallResults,
tokens: embeddingTokens
} = await multiQueryRecall({
embeddingLimit,
fullTextLimit
});
// ReRank results
const reRankResults = await (async () => {
if (!usingReRank) return [];
const { results: reRankResults, inputTokens: reRankInputTokens } = await (async () => {
if (!usingReRank) {
return {
results: [],
inputTokens: 0
};
}
set = new Set<string>(embeddingRecallResults.map((item) => item.id));
const concatRecallResults = embeddingRecallResults.concat(
@@ -725,7 +741,10 @@ export async function searchDatasetData(
});
} catch (error) {
usingReRank = false;
return [];
return {
results: [],
inputTokens: 0
};
}
})();
@@ -790,7 +809,8 @@ export async function searchDatasetData(
return {
searchRes: filterMaxTokensResult,
tokens,
embeddingTokens,
reRankInputTokens,
searchMode,
limit: maxTokens,
similarity,

View File

@@ -12,7 +12,6 @@ import { NodeInputKeyEnum, NodeOutputKeyEnum } from '@fastgpt/global/core/workfl
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
import { DatasetSearchModeEnum } from '@fastgpt/global/core/dataset/constants';
import { ChatNodeUsageType } from '@fastgpt/global/support/wallet/bill/type';
import { checkTeamReRankPermission } from '../../../../support/permission/teamLimit';
import { MongoDataset } from '../../../dataset/schema';
import { i18nT } from '../../../../../web/i18n/utils';
import { filterDatasetsByTmbId } from '../../../dataset/utils';
@@ -119,6 +118,8 @@ export async function dispatchDatasetSearch(
const vectorModel = getEmbeddingModel(
(await MongoDataset.findById(datasets[0].datasetId, 'vectorModel').lean())?.vectorModel
);
// Get Rerank Model
const rerankModelData = getRerankModel(rerankModel);
// start search
const searchData = {
@@ -132,14 +133,15 @@ export async function dispatchDatasetSearch(
datasetIds,
searchMode,
embeddingWeight,
usingReRank: usingReRank && (await checkTeamReRankPermission(teamId)),
rerankModel: getRerankModel(rerankModel),
usingReRank,
rerankModel: rerankModelData,
rerankWeight,
collectionFilterMatch
};
const {
searchRes,
tokens,
embeddingTokens,
reRankInputTokens,
usingSimilarityFilter,
usingReRank: searchUsingReRank,
queryExtensionResult,
@@ -164,17 +166,29 @@ export async function dispatchDatasetSearch(
const { totalPoints: embeddingTotalPoints, modelName: embeddingModelName } =
formatModelChars2Points({
model: vectorModel.model,
inputTokens: tokens,
inputTokens: embeddingTokens,
modelType: ModelTypeEnum.embedding
});
nodeDispatchUsages.push({
totalPoints: embeddingTotalPoints,
moduleName: node.name,
model: embeddingModelName,
inputTokens: tokens
inputTokens: embeddingTokens
});
// Rerank
const { totalPoints: reRankTotalPoints, modelName: reRankModelName } = formatModelChars2Points({
model: rerankModelData.model,
inputTokens: reRankInputTokens,
modelType: ModelTypeEnum.rerank
});
nodeDispatchUsages.push({
totalPoints: reRankTotalPoints,
moduleName: node.name,
model: reRankModelName,
inputTokens: reRankInputTokens
});
// Query extension
const { totalPoints: queryExtensionTotalPoints } = (() => {
(() => {
if (queryExtensionResult) {
const { totalPoints, modelName } = formatModelChars2Points({
model: queryExtensionResult.model,
@@ -198,7 +212,7 @@ export async function dispatchDatasetSearch(
};
})();
// Deep search
const { totalPoints: deepSearchTotalPoints } = (() => {
(() => {
if (deepSearchResult) {
const { totalPoints, modelName } = formatModelChars2Points({
model: deepSearchResult.model,
@@ -221,20 +235,26 @@ export async function dispatchDatasetSearch(
totalPoints: 0
};
})();
const totalPoints = embeddingTotalPoints + queryExtensionTotalPoints + deepSearchTotalPoints;
const totalPoints = nodeDispatchUsages.reduce((acc, item) => acc + item.totalPoints, 0);
const responseData: DispatchNodeResponseType & { totalPoints: number } = {
totalPoints,
query: userChatInput,
model: vectorModel.model,
inputTokens: tokens,
embeddingModel: vectorModel.name,
embeddingTokens,
similarity: usingSimilarityFilter ? similarity : undefined,
limit,
searchMode,
embeddingWeight: searchMode === DatasetSearchModeEnum.mixedRecall ? embeddingWeight : undefined,
rerankModel: usingReRank ? getRerankModel(rerankModel)?.name : undefined,
rerankWeight: usingReRank ? rerankWeight : undefined,
searchUsingReRank: searchUsingReRank,
// Rerank
...(searchUsingReRank && {
rerankModel: rerankModelData?.name,
rerankWeight: rerankWeight,
reRankInputTokens
}),
searchUsingReRank,
// Results
quoteList: searchRes,
queryExtensionResult,
deepSearchResult

View File

@@ -74,7 +74,7 @@ import { dispatchLoopStart } from './loop/runLoopStart';
import { dispatchFormInput } from './interactive/formInput';
import { dispatchToolParams } from './agent/runTool/toolParams';
import { getErrText } from '@fastgpt/global/common/error/utils';
import { filterModuleTypeList } from '@fastgpt/global/core/chat/utils';
import { filterPublicNodeResponseData } from '@fastgpt/global/core/chat/utils';
import { dispatchRunTool } from './plugin/runTool';
const callbackMap: Record<FlowNodeTypeEnum, Function> = {
@@ -137,8 +137,10 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
stream = false,
version = 'v1',
responseDetail = true,
responseAllData = true,
...props
} = data;
const startTime = Date.now();
rewriteRuntimeWorkFlow(runtimeNodes, runtimeEdges);
@@ -162,16 +164,24 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
[DispatchNodeResponseKeyEnum.runTimes]: 1,
[DispatchNodeResponseKeyEnum.assistantResponses]: [],
[DispatchNodeResponseKeyEnum.toolResponses]: null,
newVariables: removeSystemVariable(variables, externalProvider.externalWorkflowVariables)
newVariables: removeSystemVariable(variables, externalProvider.externalWorkflowVariables),
durationSeconds: 0
};
}
let workflowRunTimes = 0;
// set sse response headers
// Init
if (isRootRuntime) {
// set sse response headers
res?.setHeader('Connection', 'keep-alive'); // Set keepalive for long connection
if (stream && res) {
res.on('close', () => res.end());
res.on('error', () => {
addLog.error('Request error');
res.end();
});
res.setHeader('Content-Type', 'text/event-stream;charset=utf-8');
res.setHeader('Access-Control-Allow-Origin', '*');
res.setHeader('X-Accel-Buffering', 'no');
@@ -191,13 +201,14 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
};
sendStreamTimerSign();
}
}
variables = {
...getSystemVariable(data),
...externalProvider.externalWorkflowVariables,
...variables
};
// Add system variables
variables = {
...getSystemVariable(data),
...externalProvider.externalWorkflowVariables,
...variables
};
}
let chatResponses: ChatHistoryItemResType[] = []; // response request and save to database
let chatAssistantResponse: AIChatItemValueItemType[] = []; // The value will be returned to the user
@@ -640,16 +651,15 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
})();
// Response node response
if (
version === 'v2' &&
!props.isToolCall &&
isRootRuntime &&
formatResponseData &&
!(responseDetail === false && filterModuleTypeList.includes(formatResponseData.moduleType))
) {
if (version === 'v2' && !props.isToolCall && isRootRuntime && formatResponseData) {
props.workflowStreamResponse?.({
event: SseResponseEventEnum.flowNodeResponse,
data: formatResponseData
data: responseAllData
? formatResponseData
: filterPublicNodeResponseData({
flowResponses: [formatResponseData],
responseDetail
})[0]
});
}
@@ -737,6 +747,15 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
}
})();
const durationSeconds = +((Date.now() - startTime) / 1000).toFixed(2);
if (isRootRuntime && stream) {
props.workflowStreamResponse?.({
event: SseResponseEventEnum.workflowDuration,
data: { durationSeconds }
});
}
return {
flowResponses: chatResponses,
flowUsages: chatNodeUsages,
@@ -750,7 +769,8 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
[DispatchNodeResponseKeyEnum.assistantResponses]:
mergeAssistantResponseAnswerText(chatAssistantResponse),
[DispatchNodeResponseKeyEnum.toolResponses]: toolRunResponse,
newVariables: removeSystemVariable(variables, externalProvider.externalWorkflowVariables)
newVariables: removeSystemVariable(variables, externalProvider.externalWorkflowVariables),
durationSeconds
};
} catch (error) {
return Promise.reject(error);

View File

@@ -4,7 +4,8 @@ import {
} from '@fastgpt/global/core/workflow/runtime/type';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
import { NodeOutputKeyEnum } from '@fastgpt/global/core/workflow/constants';
import getMCPClient from '../../../app/mcp';
import { MCPClient } from '../../../app/mcp';
import { getErrText } from '@fastgpt/global/common/error/utils';
type RunToolProps = ModuleDispatchProps<{
toolData: {
@@ -14,7 +15,7 @@ type RunToolProps = ModuleDispatchProps<{
}>;
type RunToolResponse = DispatchNodeResultType<{
[NodeOutputKeyEnum.rawResponse]: any;
[NodeOutputKeyEnum.rawResponse]?: any;
}>;
export const dispatchRunTool = async (props: RunToolProps): Promise<RunToolResponse> => {
@@ -26,7 +27,7 @@ export const dispatchRunTool = async (props: RunToolProps): Promise<RunToolRespo
const { toolData, ...restParams } = params;
const { name: toolName, url } = toolData;
const mcpClient = getMCPClient({ url });
const mcpClient = new MCPClient({ url });
try {
const result = await mcpClient.toolCall(toolName, restParams);
@@ -40,7 +41,12 @@ export const dispatchRunTool = async (props: RunToolProps): Promise<RunToolRespo
[NodeOutputKeyEnum.rawResponse]: result
};
} catch (error) {
console.error('Error running MCP tool:', error);
return Promise.reject(error);
return {
[DispatchNodeResponseKeyEnum.nodeResponse]: {
moduleLogo: avatar,
error: getErrText(error)
},
[DispatchNodeResponseKeyEnum.toolResponses]: getErrText(error)
};
}
};

View File

@@ -26,6 +26,7 @@ export type DispatchFlowResponse = {
[DispatchNodeResponseKeyEnum.assistantResponses]: AIChatItemValueItemType[];
[DispatchNodeResponseKeyEnum.runTimes]: number;
newVariables: Record<string, string>;
durationSeconds: number;
};
export type WorkflowResponseType = ({