Mirror of https://github.com/labring/FastGPT.git (synced 2025-07-23 13:03:50 +00:00)
Tool call support interactive node (#2903)
* feat: tool call support interactive node
* feat: interactive node tool response
* fix: tool call concat
* fix: llm history concat
@@ -66,6 +66,7 @@ import { useContextSelector } from 'use-context-selector';
 import { useSystem } from '@fastgpt/web/hooks/useSystem';
 import { useCreation, useMemoizedFn, useThrottleFn } from 'ahooks';
 import MyIcon from '@fastgpt/web/components/common/Icon';
+import { mergeChatResponseData } from '@fastgpt/global/core/chat/utils';

 const ResponseTags = dynamic(() => import('./components/ResponseTags'));
 const FeedbackModal = dynamic(() => import('./components/FeedbackModal'));
@@ -383,7 +384,7 @@ const ChatBox = (
   /**
    * user confirm send prompt
    */
-  const sendPrompt: SendPromptFnType = useCallback(
+  const sendPrompt: SendPromptFnType = useMemoizedFn(
     ({
       text = '',
       files = [],
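The switch from useCallback to useMemoizedFn is what allows the dependency array removed further down in this diff to go away: ahooks' useMemoizedFn returns a function whose identity stays stable across renders while its body always reads the latest props and state. A minimal sketch of the difference, not taken from the FastGPT source:

    // Illustrative sketch only.
    import { useCallback, useState } from 'react';
    import { useMemoizedFn } from 'ahooks';

    function useStableLogger() {
      const [count, setCount] = useState(0);

      // useCallback: a new function identity appears whenever `count` changes,
      // and everything read inside must be listed in the dependency array.
      const logWithCallback = useCallback(() => console.log(count), [count]);

      // useMemoizedFn: one stable identity for the whole component lifetime,
      // yet the body still reads the latest `count` when called.
      const logWithMemoizedFn = useMemoizedFn(() => console.log(count));

      return { logWithCallback, logWithMemoizedFn, setCount };
    }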
@@ -458,7 +459,6 @@ const ChatBox = (
         ] as UserChatItemValueItemType[],
         status: ChatStatusEnum.finish
       },
-      // In normal chat mode, an extra AI item needs to be appended to receive the response message
       {
         dataId: responseChatId,
         obj: ChatRoleEnum.AI,
@@ -492,9 +492,11 @@ const ChatBox = (
       const abortSignal = new AbortController();
       chatController.current = abortSignal;

       // The last AI message is empty and will be filtered out, so the messages obtained here do not include it and no slice is needed.
       // Here, regardless of whether we are in interactive mode, the last item is always a Human message.
       const messages = chats2GPTMessages({ messages: newChatList, reserveId: true });
       const messages = chats2GPTMessages({
         messages: newChatList.slice(0, -1),
         reserveId: true
       });

       const {
         responseData,
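Both variants shown above target the same invariant described in the comments: the empty AI placeholder appended just before the request must not reach the model as history, whether that is achieved with an explicit slice(0, -1) or by filtering empty AI items inside chats2GPTMessages. A simplified sketch of that invariant, using illustrative types rather than the real ChatCompletionMessageParam:

    // Sketch only: simplified message shape for illustration.
    type SimpleMessage = { role: 'user' | 'assistant'; content: string };

    function dropEmptyTrailingAssistant(messages: SimpleMessage[]): SimpleMessage[] {
      const last = messages[messages.length - 1];
      // Drop a trailing assistant message that has no content yet.
      return last && last.role === 'assistant' && last.content === ''
        ? messages.slice(0, -1)
        : messages;
    }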
@@ -519,7 +521,7 @@ const ChatBox = (
           ...item,
           status: ChatStatusEnum.finish,
           responseData: item.responseData
-            ? [...item.responseData, ...responseData]
+            ? mergeChatResponseData([...item.responseData, ...responseData])
             : responseData
         };
       });
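mergeChatResponseData itself lives in @fastgpt/global/core/chat/utils and is not part of this diff, so the following is only a sketch of the kind of merging such a helper could perform when one workflow run is split across an interactive pause and its resume: collapse records that refer to the same node instead of blindly concatenating the two arrays. The field names are assumptions made for illustration.

    // Sketch only: the real mergeChatResponseData is not shown in this commit,
    // and nodeId/runningTime are assumed field names.
    type ResponseDataItem = { nodeId?: string; moduleName?: string; runningTime?: number };

    function mergeResponseDataSketch(items: ResponseDataItem[]): ResponseDataItem[] {
      const seen = new Map<string, ResponseDataItem>();
      const merged: ResponseDataItem[] = [];

      for (const item of items) {
        const existing = item.nodeId ? seen.get(item.nodeId) : undefined;
        if (existing) {
          // The same node reported twice (before and after an interactive pause):
          // fold the later record into the earlier one instead of keeping a duplicate.
          existing.runningTime = (existing.runningTime ?? 0) + (item.runningTime ?? 0);
          continue;
        }
        const copy = { ...item };
        if (copy.nodeId) seen.set(copy.nodeId, copy);
        merged.push(copy);
      }
      return merged;
    }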
@@ -571,28 +573,7 @@ const ChatBox = (
         console.log(err);
       }
     )();
-    },
-    [
-      abortRequest,
-      allVariableList,
-      chatHistories,
-      createQuestionGuide,
-      finishSegmentedAudio,
-      generatingMessage,
-      generatingScroll,
-      isChatting,
-      isPc,
-      onStartChat,
-      resetInputVal,
-      scrollToBottom,
-      setAudioPlayingChatId,
-      setChatHistories,
-      splitText2Audio,
-      startSegmentedAudio,
-      t,
-      toast,
-      variablesForm
-    ]
+    }
   );

   // retry input

@@ -1,7 +1,7 @@
 import { StreamResponseType } from '@/web/common/api/fetch';
 import { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type';
 import { ChatSiteItemType, ToolModuleResponseItemType } from '@fastgpt/global/core/chat/type';
-import { InteractiveNodeResponseItemType } from '@fastgpt/global/core/workflow/template/system/interactive/type';
+import { WorkflowInteractiveResponseType } from '@fastgpt/global/core/workflow/template/system/interactive/type';

 export type generatingMessageProps = {
   event: SseResponseEventEnum;
@@ -9,7 +9,7 @@ export type generatingMessageProps = {
   name?: string;
   status?: 'running' | 'finish';
   tool?: ToolModuleResponseItemType;
-  interactive?: InteractiveNodeResponseItemType;
+  interactive?: WorkflowInteractiveResponseType;
   variables?: Record<string, any>;
 };

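With the field typed as WorkflowInteractiveResponseType, a generating-message handler can branch on whether a streamed chunk carries an interactive payload. The snippet below is an assumed usage sketch built only from the fields visible in generatingMessageProps, not code from the repository; renderInteractive and appendText are hypothetical helpers.

    // Assumed usage sketch; relies only on fields shown in generatingMessageProps.
    import type { WorkflowInteractiveResponseType } from '@fastgpt/global/core/workflow/template/system/interactive/type';

    type GeneratingChunk = {
      text?: string;
      status?: 'running' | 'finish';
      interactive?: WorkflowInteractiveResponseType;
    };

    // Hypothetical UI helpers, declared only to keep the sketch self-contained.
    declare function renderInteractive(payload: WorkflowInteractiveResponseType): void;
    declare function appendText(text: string): void;

    function handleGeneratingChunk(chunk: GeneratingChunk) {
      if (chunk.interactive) {
        // An interactive node paused the workflow: render its form or options
        // instead of streaming more text.
        renderInteractive(chunk.interactive);
        return;
      }
      if (chunk.text) {
        appendText(chunk.text);
      }
    }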
@@ -85,7 +85,7 @@ const RenderTool = React.memo(
     })();

     return (
-      <Accordion key={tool.id} allowToggle>
+      <Accordion key={tool.id} allowToggle _notLast={{ mb: 2 }}>
         <AccordionItem borderTop={'none'} borderBottom={'none'}>
           <AccordionButton
             w={'auto'}
@@ -140,6 +140,12 @@ export const WholeResponseContent = ({
             value={formatNumber(activeModule.totalPoints)}
           />
         )}
+        {activeModule?.childTotalPoints !== undefined && (
+          <Row
+            label={t('chat:response.child total points')}
+            value={formatNumber(activeModule.childTotalPoints)}
+          />
+        )}
         <Row
           label={t('common:core.chat.response.module time')}
           value={`${activeModule?.runningTime || 0}s`}
@@ -29,7 +29,6 @@ import {
 } from '@fastgpt/global/core/workflow/runtime/utils';
 import { StoreNodeItemType } from '@fastgpt/global/core/workflow/type/node';
 import { getWorkflowResponseWrite } from '@fastgpt/service/core/workflow/dispatch/utils';
 import { getNanoid } from '@fastgpt/global/common/string/tools';
 import { WORKFLOW_MAX_RUN_TIMES } from '@fastgpt/service/core/workflow/constants';
 import { getPluginInputsFromStoreNodes } from '@fastgpt/global/core/app/plugin/utils';