Release update (#1580)

* release doc

* fix: reg match

* perf: tool call arg

* fix: stream update variables

* remove status

* update prompt

* rename embedding model
This commit is contained in:
Archer
2024-05-24 11:07:03 +08:00
committed by GitHub
parent 92a3d6d268
commit 9c7c74050b
23 changed files with 119 additions and 93 deletions

View File

@@ -69,7 +69,7 @@ curl --location --request POST 'https://api.fastgpt.in/api/v1/chat/completions'
## 响应
{{< tabs tabTotal="4" >}}
{{< tabs tabTotal="5" >}}
{{< tab tabName="detail=false,stream=false 响应" >}}
{{< markdownify >}}
@@ -242,6 +242,24 @@ event: appStreamResponse
data: [{"moduleName":"知识库搜索","moduleType":"datasetSearchNode","runningTime":1.78},{"question":"导演是谁","quoteList":[{"id":"654f2e49b64caef1d9431e8b","q":"电影《铃芽之旅》的导演是谁?","a":"电影《铃芽之旅》的导演是新海诚!","indexes":[{"type":"qa","dataId":"3515487","text":"电影《铃芽之旅》的导演是谁?","_id":"654f2e49b64caef1d9431e8c","defaultIndex":true}],"datasetId":"646627f4f7b896cfd8910e38","collectionId":"653279b16cd42ab509e766e8","sourceName":"data (81).csv","sourceId":"64fd3b6423aa1307b65896f6","score":0.8935586214065552},{"id":"6552e14c50f4a2a8e632af11","q":"导演是谁?","a":"电影《铃芽之旅》的导演是新海诚。","indexes":[{"defaultIndex":true,"type":"qa","dataId":"3644565","text":"导演是谁?\n电影《铃芽之旅》的导演是新海诚。","_id":"6552e14dde5cc7ba3954e417"}],"datasetId":"646627f4f7b896cfd8910e38","collectionId":"653279b16cd42ab509e766e8","sourceName":"data (81).csv","sourceId":"64fd3b6423aa1307b65896f6","score":0.8890955448150635},{"id":"654f34a0b64caef1d946337e","q":"本作的主人公是谁?","a":"本作的主人公是名叫铃芽的少女。","indexes":[{"type":"qa","dataId":"3515541","text":"本作的主人公是谁?","_id":"654f34a0b64caef1d946337f","defaultIndex":true}],"datasetId":"646627f4f7b896cfd8910e38","collectionId":"653279b16cd42ab509e766e8","sourceName":"data (81).csv","sourceId":"64fd3b6423aa1307b65896f6","score":0.8738770484924316},{"id":"654f3002b64caef1d944207a","q":"电影《铃芽之旅》男主角是谁?","a":"电影《铃芽之旅》男主角是宗像草太,由松村北斗配音。","indexes":[{"type":"qa","dataId":"3515538","text":"电影《铃芽之旅》男主角是谁?","_id":"654f3002b64caef1d944207b","defaultIndex":true}],"datasetId":"646627f4f7b896cfd8910e38","collectionId":"653279b16cd42ab509e766e8","sourceName":"data (81).csv","sourceId":"64fd3b6423aa1307b65896f6","score":0.8607980012893677},{"id":"654f2fc8b64caef1d943fd46","q":"电影《铃芽之旅》的编剧是谁?","a":"新海诚是本片的编剧。","indexes":[{"defaultIndex":true,"type":"qa","dataId":"3515550","text":"电影《铃芽之旅》的编剧是谁22","_id":"654f2fc8b64caef1d943fd47"}],"datasetId":"646627f4f7b896cfd8910e38","collectionId":"653279b16cd42ab509e766e8","sourceName":"data (81).csv","sourceId":"64fd3b6423aa1307b65896f6","score":0.8468944430351257}],"moduleName":"AI 
对话","moduleType":"chatNode","runningTime":1.86}]
```
{{< /markdownify >}}
{{< /tab >}}
{{< tab tabName="detail=true,stream=true 时event值" >}}
{{< markdownify >}}
event取值
- answer: 返回给客户端的文本(最终会算作回答)
- fastAnswer: 指定回复返回给客户端的文本(最终会算作回答)
- toolCall: 执行工具
- toolParams: 工具参数
- toolResponse: 工具返回
- flowNodeStatus: 运行到的节点状态
- flowResponses: 节点完整响应
- updateVariables: 更新变量
- error: 报错
{{< /markdownify >}}
{{< /tab >}}
{{< /tabs >}}

View File

@@ -1,5 +1,5 @@
---
title: 'V4.8.1(进行中)'
title: 'V4.8.1'
description: 'FastGPT V4.8.1 更新说明'
icon: 'upgrade'
draft: false
@@ -34,12 +34,6 @@ curl --location --request POST 'https://{{host}}/api/admin/clearInvalidData' \
## V4.8.1 更新说明
1. 新增 - 知识库重新选择向量模型重建
2. 新增 - 对话框支持问题模糊检索提示,可自定义预设问题词库。
3. 新增 - 工作流节点版本变更提示,并可以同步最新版本配置,避免存在隐藏脏数据。
4. 新增 - 开放文件导入知识库接口到开源版, [点击插件文档](/docs/development/openapi/dataset/#创建一个文件集合)
5. 新增 - 外部文件源知识库, [点击查看文档](/docs/course/externalfile/)
6. 优化 - 插件输入的 debug 模式,支持全量参数输入渲染。
7. 修复 - 插件输入默认值被清空问题。
8. 修复 - 工作流删除节点的动态输入和输出时候,没有正确的删除连接线,导致可能出现逻辑异常。
9. 修复 - 定时器清理脏数据任务
使用 Chat api 接口需要注意,增加了 event: updateVariables 事件,用于更新变量。
[点击查看升级说明](https://github.com/labring/FastGPT/releases/tag/v4.8.1)

View File

@@ -69,8 +69,8 @@ services:
wait $$!
fastgpt:
container_name: fastgpt
image: ghcr.io/labring/fastgpt:v4.8 # git
# image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.8 # 阿里云
image: ghcr.io/labring/fastgpt:v4.8.1 # git
# image: registry.cn-hangzhou.aliyuncs.com/fastgpt/fastgpt:v4.8.1 # 阿里云
ports:
- 3000:3000
networks:

View File

@@ -71,7 +71,7 @@ export const replaceRegChars = (text: string) => text.replace(/[.*+?^${}()|[\]\\
export const sliceJsonStr = (str: string) => {
str = str.replace(/(\\n|\\)/g, '').replace(/ /g, '');
const jsonRegex = /{[^{}]*}/g;
const jsonRegex = /{(?:[^{}]|{(?:[^{}]|{[^{}]*})*})*}/g;
const matches = str.match(jsonRegex);
if (!matches) {

View File

@@ -16,7 +16,7 @@ export const VariableUpdateNode: FlowNodeTemplateType = {
avatar: '/imgs/workflow/variable.png',
name: '变量更新',
intro: '可以更新指定节点的输出值或更新全局变量',
showStatus: true,
showStatus: false,
isTool: false,
version: '481',
inputs: [

View File

@@ -15,7 +15,7 @@ const defaultPrompt = `作为一个向量检索助手,你的任务是结合历
"""
"""
原问题: 介绍下剧情。
检索词: ["介绍下故事的背景和主要人物。","故事的主题是什么?","剧情是是如何发展的?"]
检索词: ["介绍下故事的背景。","故事的主题是什么?","介绍下故事的主要人物。"]
----------------
历史记录:
"""
@@ -41,7 +41,7 @@ Q: 护产假多少天?
A: 护产假的天数根据员工所在的城市而定。请提供您所在的城市,以便我回答您的问题。
"""
原问题: 沈阳
检索词: ["沈阳的护产假多少天?"]
检索词: ["沈阳的护产假多少天?","沈阳的护产假政策。","沈阳的护产假标准。"]
----------------
历史记录:
"""
@@ -75,7 +75,7 @@ A: 1. 开源
3. 扩展性强
"""
原问题: 介绍下第2点。
检索词: ["介绍下 FastGPT 简便的优势"]。
检索词: ["介绍下 FastGPT 简便的优势", "从哪些方面,可以体现出 FastGPT 的简便"]。
----------------
历史记录:
"""

View File

@@ -1,7 +1,7 @@
export const Prompt_Tool_Call = `<Instruction>
你是一个智能机器人,除了可以回答用户问题外,你还掌握工具的使用能力。有时候,你可以依赖工具的运行结果,来更准确的回答用户。
工具使用了 JSON Schema 的格式声明,其中 toolId 是工具的 description 是工具的描述parameters 是工具的参数,包括参数的类型和描required 是必填参数的列表。
工具使用了 JSON Schema 的格式声明,其中 toolId 是工具的唯一标识, description 是工具的描述parameters 是工具的参数及参数表required 是必填参数的列表。
请你根据工具描述决定回答问题或是使用工具。在完成任务过程中USER代表用户的输入TOOL_RESPONSE代表工具运行结果ANSWER 代表你的输出。
你的每次输出都必须以0,1开头代表是否需要调用工具
@@ -13,19 +13,19 @@ export const Prompt_Tool_Call = `<Instruction>
USER: 你好呀
ANSWER: 0: 你好,有什么可以帮助你的么?
USER: 现在几点了?
ANSWER: 1: {"toolId":"timeToolId"}
ANSWER: 1: {"toolId":"searchToolId1"}
TOOL_RESPONSE: """
2022/5/5 12:00 Thursday
"""
ANSWER: 0: 现在是2022年5月5日星期四中午12点。
USER: 今天杭州的天气如何?
ANSWER: 1: {"toolId":"testToolId","arguments":{"city": "杭州"}}
ANSWER: 1: {"toolId":"searchToolId2","arguments":{"city": "杭州"}}
TOOL_RESPONSE: """
晴天......
"""
ANSWER: 0: 今天杭州是晴天。
USER: 今天杭州的天气适合去哪里玩?
ANSWER: 1: {"toolId":"testToolId2","arguments":{"query": "杭州 天气 去哪里玩"}}
ANSWER: 1: {"toolId":"searchToolId3","arguments":{"query": "杭州 天气 去哪里玩"}}
TOOL_RESPONSE: """
晴天. 西湖、灵隐寺、千岛湖……
"""

View File

@@ -385,7 +385,10 @@ async function streamResponse({
});
}
}
continue;
}
/* arg 插入最后一个工具的参数里 */
const arg: string = functionCall?.arguments || '';
const currentTool = functionCalls[functionCalls.length - 1];

View File

@@ -125,7 +125,6 @@ export const runToolWithPromptCall = async (
})();
const { answer: replaceAnswer, toolJson } = parseAnswer(answer);
// console.log(parseAnswer, '==11==');
// No tools
if (!toolJson) {
if (replaceAnswer === ERROR_TEXT && stream && detail) {

View File

@@ -354,7 +354,7 @@ async function streamResponse({
}
const responseChoice = part.choices?.[0]?.delta;
// console.log(JSON.stringify(responseChoice, null, 2));
if (responseChoice?.content) {
const content = responseChoice.content || '';
textAnswer += content;
@@ -369,7 +369,7 @@ async function streamResponse({
} else if (responseChoice?.tool_calls?.[0]) {
const toolCall: ChatCompletionMessageToolCall = responseChoice.tool_calls[0];
// 流响应中,每次只会返回一个工具. 如果带了 id说明是执行一个工具
// In a stream response, only one tool is returned at a time. If have id, description is executing a tool
if (toolCall.id) {
const toolNode = toolNodes.find((item) => item.nodeId === toolCall.function?.name);
@@ -400,10 +400,14 @@ async function streamResponse({
});
}
}
continue;
}
/* arg 插入最后一个工具的参数里 */
const arg: string = responseChoice.tool_calls?.[0]?.function?.arguments;
const arg: string = toolCall?.function?.arguments;
const currentTool = toolCalls[toolCalls.length - 1];
if (currentTool) {
currentTool.function.arguments += arg;

View File

@@ -28,7 +28,7 @@ import { dispatchQueryExtension } from './tools/queryExternsion';
import { dispatchRunPlugin } from './plugin/run';
import { dispatchPluginInput } from './plugin/runInput';
import { dispatchPluginOutput } from './plugin/runOutput';
import { valueTypeFormat } from './utils';
import { removeSystemVariable, valueTypeFormat } from './utils';
import {
filterWorkflowEdges,
checkNodeRunStatus
@@ -419,18 +419,6 @@ export function getSystemVariable({
};
}
/* remove system variable */
/**
 * Strip the server-injected system keys (appId, chatId, responseChatItemId,
 * histories, cTime) from a workflow variables map before it is sent to the
 * client. Returns a new object; the caller's object is not mutated.
 */
const removeSystemVariable = (variables: Record<string, any>) => {
  // Rest-destructuring both shallow-copies and drops the system keys in one step.
  const { appId, chatId, responseChatItemId, histories, cTime, ...userVariables } = variables;
  return userVariables;
};
/* Merge consecutive text messages into one */
export const mergeAssistantResponseAnswerText = (response: AIChatItemValueItemType[]) => {
const result: AIChatItemValueItemType[] = [];

View File

@@ -1,19 +1,22 @@
import { NodeInputKeyEnum, VARIABLE_NODE_ID } from '@fastgpt/global/core/workflow/constants';
import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
import {
DispatchNodeResponseKeyEnum,
SseResponseEventEnum
} from '@fastgpt/global/core/workflow/runtime/constants';
import { DispatchNodeResultType } from '@fastgpt/global/core/workflow/runtime/type';
import { getReferenceVariableValue } from '@fastgpt/global/core/workflow/runtime/utils';
import { TUpdateListItem } from '@fastgpt/global/core/workflow/template/system/variableUpdate/type';
import { ModuleDispatchProps } from '@fastgpt/global/core/workflow/type';
import { valueTypeFormat } from '../utils';
import { removeSystemVariable, valueTypeFormat } from '../utils';
import { responseWrite } from '../../../../common/response';
type Props = ModuleDispatchProps<{
[NodeInputKeyEnum.updateList]: TUpdateListItem[];
}>;
type Response = DispatchNodeResultType<{}>;
export const dispatchUpdateVariable = async (
props: Props
): Promise<DispatchNodeResultType<any>> => {
const { params, variables, runtimeNodes } = props;
export const dispatchUpdateVariable = async (props: Props): Promise<Response> => {
const { res, detail, params, variables, runtimeNodes } = props;
const { updateList } = params;
updateList.forEach((item) => {
@@ -51,6 +54,14 @@ export const dispatchUpdateVariable = async (
}
});
if (detail) {
responseWrite({
res,
event: SseResponseEventEnum.updateVariables,
data: JSON.stringify(removeSystemVariable(variables))
});
}
return {
[DispatchNodeResponseKeyEnum.nodeResponse]: {
totalPoints: 0

View File

@@ -79,3 +79,15 @@ export const valueTypeFormat = (value: any, type?: WorkflowIOValueTypeEnum) => {
return value;
};
/* remove system variable */
/* remove system variable
 * Produces a copy of the variables map with the server-side system keys
 * removed, so only user-visible variables are exposed (e.g. in SSE
 * updateVariables events). The input object is left untouched. */
export const removeSystemVariable = (variables: Record<string, any>) => {
  const cleaned = { ...variables };
  // Keys injected by the workflow runtime that must not leak to the client.
  for (const systemKey of ['appId', 'chatId', 'responseChatItemId', 'histories', 'cTime']) {
    delete cleaned[systemKey];
  }
  return cleaned;
};

View File

@@ -108,7 +108,7 @@
},
{
"model": "text-embedding-ada-002",
"name": "Embedding-2",
"name": "text-embedding-ada-002",
"avatar": "/imgs/model/openai.svg",
"charsPointsPrice": 0,
"defaultToken": 512,

View File

@@ -59,6 +59,7 @@ import ChatItem from './components/ChatItem';
import dynamic from 'next/dynamic';
import { useCreation } from 'ahooks';
import { AppChatConfigType } from '@fastgpt/global/core/app/type';
import type { StreamResponseType } from '@/web/common/api/fetch';
const ResponseTags = dynamic(() => import('./ResponseTags'));
const FeedbackModal = dynamic(() => import('./FeedbackModal'));
@@ -90,12 +91,11 @@ type Props = OutLinkChatAuthProps & {
chatId?: string;
onUpdateVariable?: (e: Record<string, any>) => void;
onStartChat?: (e: StartChatFnProps) => Promise<{
responseText: string;
[DispatchNodeResponseKeyEnum.nodeResponse]: ChatHistoryItemResType[];
newVariables?: Record<string, any>;
onStartChat?: (e: StartChatFnProps) => Promise<
StreamResponseType & {
isNewChat?: boolean;
}>;
}
>;
onDelMessage?: (e: { contentId: string }) => void;
};
@@ -207,7 +207,8 @@ const ChatBox = (
status,
name,
tool,
autoTTSResponse
autoTTSResponse,
variables
}: generatingMessageProps & { autoTTSResponse?: boolean }) => {
setChatHistories((state) =>
state.map((item, index) => {
@@ -290,6 +291,8 @@ const ChatBox = (
return val;
})
};
} else if (event === SseResponseEventEnum.updateVariables && variables) {
setValue('variables', variables);
}
return item;
@@ -297,7 +300,7 @@ const ChatBox = (
);
generatingScroll();
},
[generatingScroll, setChatHistories, splitText2Audio]
[generatingScroll, setChatHistories, setValue, splitText2Audio]
);
// 重置输入内容
@@ -466,7 +469,6 @@ const ChatBox = (
const {
responseData,
responseText,
newVariables,
isNewChat = false
} = await onStartChat({
chatList: newChatList,
@@ -476,8 +478,6 @@ const ChatBox = (
variables: requestVariables
});
newVariables && setValue('variables', newVariables);
isNewChatReplace.current = isNewChat;
// set finish status
@@ -561,7 +561,6 @@ const ChatBox = (
resetInputVal,
setAudioPlayingChatId,
setChatHistories,
setValue,
splitText2Audio,
startSegmentedAudio,
t,
@@ -696,7 +695,7 @@ const ChatBox = (
} catch (error) {}
};
},
[appId, chatId, feedbackType, outLinkUid, shareId, teamId, teamToken]
[appId, chatId, feedbackType, outLinkUid, setChatHistories, shareId, teamId, teamToken]
);
const onCloseUserLike = useCallback(
(chat: ChatSiteItemType) => {

View File

@@ -8,11 +8,12 @@ import {
import { SseResponseEventEnum } from '@fastgpt/global/core/workflow/runtime/constants';
export type generatingMessageProps = {
event: `${SseResponseEventEnum}`;
event: SseResponseEventEnum;
text?: string;
name?: string;
status?: 'running' | 'finish';
tool?: ToolModuleResponseItemType;
variables?: Record<string, any>;
};
export type UserInputFileItemType = {

View File

@@ -62,7 +62,7 @@ const ChatTest = (
const history = chatList.slice(-historyMaxLen - 2, -2);
// 流请求,获取数据
const { responseText, responseData, newVariables } = await streamFetch({
const { responseText, responseData } = await streamFetch({
url: '/api/core/chat/chatTest',
data: {
history,
@@ -78,7 +78,7 @@ const ChatTest = (
abortCtrl: controller
});
return { responseText, responseData, newVariables };
return { responseText, responseData };
},
[appDetail._id, appDetail.name, edges, nodes]
);

View File

@@ -72,7 +72,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const { user } = await getUserChatInfoAndAuthTeamPoints(tmbId);
/* start process */
const { flowResponses, flowUsages, newVariables } = await dispatchWorkFlow({
const { flowResponses, flowUsages } = await dispatchWorkFlow({
res,
mode: 'test',
teamId,
@@ -99,11 +99,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
event: SseResponseEventEnum.flowResponses,
data: JSON.stringify(flowResponses)
});
responseWrite({
res,
event: SseResponseEventEnum.updateVariables,
data: JSON.stringify(newVariables)
});
res.end();
pushChatUsage({

View File

@@ -292,11 +292,6 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
});
if (detail) {
responseWrite({
res,
event: SseResponseEventEnum.updateVariables,
data: JSON.stringify(newVariables)
});
if (responseDetail) {
responseWrite({
res,
@@ -315,7 +310,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
return assistantResponses;
})();
res.json({
...(detail ? { responseData: feResponseData } : {}),
...(detail ? { responseData: feResponseData, newVariables } : {}),
id: chatId || '',
model: '',
usage: { prompt_tokens: 1, completion_tokens: 1, total_tokens: 1 },

View File

@@ -74,7 +74,7 @@ const Chat = ({ appId, chatId }: { appId: string; chatId: string }) => {
const prompts = messages.slice(-2);
const completionChatId = chatId ? chatId : nanoid();
const { responseText, responseData, newVariables } = await streamFetch({
const { responseText, responseData } = await streamFetch({
data: {
messages: prompts,
variables,
@@ -123,7 +123,7 @@ const Chat = ({ appId, chatId }: { appId: string; chatId: string }) => {
history: ChatBoxRef.current?.getChatHistories() || state.history
}));
return { responseText, responseData, isNewChat: forbidRefresh.current, newVariables };
return { responseText, responseData, isNewChat: forbidRefresh.current };
},
[appId, chatId, histories, pushHistory, router, setChatData, updateHistory]
);

View File

@@ -95,7 +95,7 @@ const OutLink = ({
'*'
);
const { responseText, responseData, newVariables } = await streamFetch({
const { responseText, responseData } = await streamFetch({
data: {
messages: prompts,
variables: {
@@ -169,7 +169,7 @@ const OutLink = ({
'*'
);
return { responseText, responseData, isNewChat: forbidRefresh.current, newVariables };
return { responseText, responseData, isNewChat: forbidRefresh.current };
},
[
chatId,

View File

@@ -21,10 +21,7 @@ import ChatHistorySlider from './components/ChatHistorySlider';
import ChatHeader from './components/ChatHeader';
import { serviceSideProps } from '@/web/common/utils/i18n';
import { useTranslation } from 'next-i18next';
import {
checkChatSupportSelectFileByChatModels,
getAppQuestionGuidesByUserGuideModule
} from '@/web/core/chat/utils';
import { checkChatSupportSelectFileByChatModels } from '@/web/core/chat/utils';
import { useChatStore } from '@/web/core/chat/storeChat';
import { customAlphabet } from 'nanoid';
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 12);
@@ -38,8 +35,6 @@ import { getErrText } from '@fastgpt/global/common/error/utils';
import MyBox from '@fastgpt/web/components/common/MyBox';
import SliderApps from './components/SliderApps';
import { GPTMessages2Chats } from '@fastgpt/global/core/chat/adapt';
import { StoreNodeItemType } from '@fastgpt/global/core/workflow/type';
import { getAppQGuideCustomURL } from '@/web/core/app/utils';
const OutLink = () => {
const { t } = useTranslation();
@@ -84,7 +79,7 @@ const OutLink = () => {
const prompts = messages.slice(-2);
const completionChatId = chatId ? chatId : nanoid();
const { responseText, responseData, newVariables } = await streamFetch({
const { responseText, responseData } = await streamFetch({
data: {
messages: prompts,
variables: {
@@ -140,9 +135,20 @@ const OutLink = () => {
history: ChatBoxRef.current?.getChatHistories() || state.history
}));
return { responseText, responseData, isNewChat: forbidRefresh.current, newVariables };
return { responseText, responseData, isNewChat: forbidRefresh.current };
},
[appId, teamToken, chatId, histories, pushHistory, router, setChatData, teamId, updateHistory]
[
chatId,
customVariables,
appId,
teamId,
teamToken,
setChatData,
pushHistory,
router,
histories,
updateHistory
]
);
/* replace router query to last chat */

View File

@@ -18,10 +18,9 @@ type StreamFetchProps = {
onMessage: StartChatFnProps['generatingMessage'];
abortCtrl: AbortController;
};
type StreamResponseType = {
export type StreamResponseType = {
responseText: string;
[DispatchNodeResponseKeyEnum.nodeResponse]: ChatHistoryItemResType[];
newVariables: Record<string, any>;
};
class FatalError extends Error {}
@@ -50,7 +49,6 @@ export const streamFetch = ({
)[] = [];
let errMsg: string | undefined;
let responseData: ChatHistoryItemResType[] = [];
let newVariables: Record<string, any> = {};
let finished = false;
const finish = () => {
@@ -58,7 +56,6 @@ export const streamFetch = ({
return failedFinish();
}
return resolve({
newVariables,
responseText,
responseData
});
@@ -71,7 +68,7 @@ export const streamFetch = ({
});
};
const isAnswerEvent = (event: `${SseResponseEventEnum}`) =>
const isAnswerEvent = (event: SseResponseEventEnum) =>
event === SseResponseEventEnum.answer || event === SseResponseEventEnum.fastAnswer;
// animate response to make it looks smooth
function animateResponseText() {
@@ -200,7 +197,10 @@ export const streamFetch = ({
} else if (event === SseResponseEventEnum.flowResponses && Array.isArray(parseJson)) {
responseData = parseJson;
} else if (event === SseResponseEventEnum.updateVariables) {
newVariables = parseJson;
onMessage({
event,
variables: parseJson
});
} else if (event === SseResponseEventEnum.error) {
if (parseJson.statusText === TeamErrEnum.aiPointsNotEnough) {
useSystemStore.getState().setIsNotSufficientModal(true);