mirror of https://github.com/labring/FastGPT.git (synced 2025-10-16 08:01:18 +00:00)

fix: ts (#4458)
@@ -18,7 +18,7 @@ weight: 852
{{% alert icon="🤖 " context="success" %}}
* The API Key for this endpoint must be an `app-specific key`; otherwise the call returns an error.

* The chat API now has `v1` and `v2` endpoints; use whichever you need. `v2` was added in 4.9.4, and `v1` is no longer maintained.
<!-- * The chat API now has `v1` and `v2` endpoints; use whichever you need. `v2` was added in 4.9.4, and `v1` is no longer maintained. -->

* Some client packages need the `v1` path appended to `BaseUrl`, others do not. If a call returns 404, retry with `v1` added.
{{% /alert %}}
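
To make the notes in the alert above concrete (an app-specific key, plus appending `v1` to the base URL when a 404 comes back), here is a minimal TypeScript sketch. The host, endpoint paths, and request body fields are assumptions used for illustration; they are not taken from this commit.

```ts
// Minimal sketch: call the chat endpoint with an app-specific key, and retry with the
// `v1` path segment if the first attempt returns 404. Host, paths, and body fields are
// illustrative assumptions.
async function chatCompletion(host: string, appKey: string, question: string) {
  const init: RequestInit = {
    method: 'POST',
    headers: {
      Authorization: `Bearer ${appKey}`, // must be an app-specific key
      'Content-Type': 'application/json'
    },
    body: JSON.stringify({
      stream: false,
      detail: false,
      messages: [{ role: 'user', content: question }]
    })
  };

  let res = await fetch(`${host}/api/chat/completions`, init);
  if (res.status === 404) {
    // Some clients need the extra `v1` segment in the base URL.
    res = await fetch(`${host}/api/v1/chat/completions`, init);
  }
  return res.json();
}
```

The same fallback idea applies when a client SDK's `BaseUrl` setting is used instead of raw `fetch`.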
@@ -34,7 +34,7 @@ weight: 852

### Request

#### v2
<!-- #### v2

The v1 and v2 endpoints accept the same request parameters; only the request URL differs.

@@ -390,9 +390,9 @@ event取值:

{{< /markdownify >}}
{{< /tab >}}
{{< /tabs >}}
{{< /tabs >}}

#### v1
#### v1 -->

{{< tabs tabTotal="5" >}}
{{< tab tabName="detail=false,stream=false 响应" >}}

@@ -47,7 +47,6 @@ curl --location --request POST 'https://{{host}}/api/admin/initv494' \
4. Use Redis to cache part of the data.
5. Site sync now supports configuring training parameters.
6. AI chat / tool calls now return the model's finish_reason field.
7. completion v2 endpoint
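
As a rough illustration of item 6, a caller might inspect the new finish_reason field as sketched below; the OpenAI-compatible `choices[].finish_reason` shape is an assumption for illustration, not something this commit specifies.

```ts
// Minimal sketch: read the finish_reason now returned for AI chat / tool-call nodes.
// The response shape below (choices[].finish_reason) is an assumed OpenAI-compatible format.
type ChatChoice = {
  finish_reason?: string; // e.g. 'stop', 'length', 'tool_calls'
  message?: { role: string; content: string };
};

function checkFinishReason(response: { choices?: ChatChoice[] }) {
  const reason = response.choices?.[0]?.finish_reason;
  if (reason && reason !== 'stop') {
    // 'length' usually means the reply was truncated by the token limit.
    console.warn(`Model stopped early: ${reason}`);
  }
  return reason;
}
```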

## ⚙️ Optimizations

@@ -18,6 +18,7 @@ import { ChatItemContext } from '@/web/core/chat/context/chatItemContext';
import { ChatRecordContext } from '@/web/core/chat/context/chatRecordContext';
import { AppFileSelectConfigType } from '@fastgpt/global/core/app/type';
import { defaultAppSelectFileConfig } from '@fastgpt/global/core/app/constants';
import { mergeChatResponseData } from '@fastgpt/global/core/chat/utils';

type PluginRunContextType = PluginRunBoxProps & {
  isChatting: boolean;
@@ -46,11 +47,12 @@ const PluginRunContextProvider = ({

  const pluginInputs = useContextSelector(ChatItemContext, (v) => v.chatBoxData?.app?.pluginInputs);
  const setTab = useContextSelector(ChatItemContext, (v) => v.setPluginRunTab);
  const variablesForm = useContextSelector(ChatItemContext, (v) => v.variablesForm);
  const chatConfig = useContextSelector(ChatItemContext, (v) => v.chatBoxData?.app?.chatConfig);

  const setChatRecords = useContextSelector(ChatRecordContext, (v) => v.setChatRecords);
  const chatRecords = useContextSelector(ChatRecordContext, (v) => v.chatRecords);

  const chatConfig = useContextSelector(ChatItemContext, (v) => v.chatBoxData?.app?.chatConfig);

  const { instruction = '', fileSelectConfig = defaultAppSelectFileConfig } = useMemo(
    () => chatConfig || {},
    [chatConfig]
@@ -65,7 +67,7 @@ const PluginRunContextProvider = ({
  }, []);

  const generatingMessage = useCallback(
    ({ event, text = '', status, name, tool }: generatingMessageProps) => {
    ({ event, text = '', status, name, tool, nodeResponse, variables }: generatingMessageProps) => {
      setChatRecords((state) =>
        state.map((item, index) => {
          if (index !== state.length - 1 || item.obj !== ChatRoleEnum.AI) return item;
@@ -74,7 +76,14 @@ const PluginRunContextProvider = ({
            JSON.stringify(item.value[item.value.length - 1])
          );

          if (event === SseResponseEventEnum.flowNodeStatus && status) {
          if (event === SseResponseEventEnum.flowNodeResponse && nodeResponse) {
            return {
              ...item,
              responseData: item.responseData
                ? [...item.responseData, nodeResponse]
                : [nodeResponse]
            };
          } else if (event === SseResponseEventEnum.flowNodeStatus && status) {
            return {
              ...item,
              status,
@@ -144,13 +153,15 @@ const PluginRunContextProvider = ({
                return val;
              })
            };
          } else if (event === SseResponseEventEnum.updateVariables && variables) {
            variablesForm.setValue('variables', variables);
          }

          return item;
        })
      );
    },
    [setChatRecords]
    [setChatRecords, variablesForm]
  );

  const isChatting = useMemo(
@@ -226,7 +237,7 @@ const PluginRunContextProvider = ({
      }
    }

    const { responseData } = await onStartChat({
    await onStartChat({
      messages,
      controller: chatController.current,
      generatingMessage,
@@ -235,16 +246,20 @@ const PluginRunContextProvider = ({
        ...formatVariables
      }
    });

    if (responseData?.[responseData.length - 1]?.error) {
      toast({
        title: responseData[responseData.length - 1].error?.message,
        status: 'error'
      });
    }

    setChatRecords((state) =>
      state.map((item, index) => {
        if (index !== state.length - 1) return item;

        // Check node response error
        const responseData = mergeChatResponseData(item.responseData || []);
        if (responseData[responseData.length - 1]?.error) {
          toast({
            title: t(responseData[responseData.length - 1].error?.message),
            status: 'error'
          });
        }

        return {
          ...item,
          status: 'finish',

@@ -48,7 +48,7 @@ export const useChatTest = ({
    const histories = messages.slice(-1);

    // Stream the request and collect the data
    const { responseText, responseData } = await streamFetch({
    const { responseText } = await streamFetch({
      url: '/api/core/chat/chatTest',
      data: {
        // Send histories and user messages
@@ -66,7 +66,7 @@ export const useChatTest = ({
      abortCtrl: controller
    });

    return { responseText, responseData };
    return { responseText };
  }
);

@@ -115,7 +115,7 @@ const Chat = ({ myApps }: { myApps: AppListItemType[] }) => {
    }: StartChatFnProps) => {
      // Just send a user prompt
      const histories = messages.slice(-1);
      const { responseText, responseData } = await streamFetch({
      const { responseText } = await streamFetch({
        data: {
          messages: histories,
          variables,
@@ -137,7 +137,7 @@ const Chat = ({ myApps }: { myApps: AppListItemType[] }) => {
          title: newTitle
        }));

        return { responseText, responseData, isNewChat: forbidLoadChat.current };
        return { responseText, isNewChat: forbidLoadChat.current };
      },
      [appId, chatId, onUpdateHistoryTitle, setChatBoxData, forbidLoadChat]
    );

@@ -151,7 +151,7 @@ const OutLink = (props: Props) => {
        '*'
      );

      const { responseText, responseData } = await streamFetch({
      const { responseText } = await streamFetch({
        data: {
          messages: histories,
          variables: {
@@ -192,7 +192,7 @@ const OutLink = (props: Props) => {
        '*'
      );

      return { responseText, responseData, isNewChat: forbidLoadChat.current };
      return { responseText, isNewChat: forbidLoadChat.current };
    },
    [
      chatId,

@@ -112,7 +112,7 @@ const Chat = ({ myApps }: { myApps: AppListItemType[] }) => {
      // Just send a user prompt
      const histories = messages.slice(-1);

      const { responseText, responseData } = await streamFetch({
      const { responseText } = await streamFetch({
        data: {
          messages: histories,
          variables: {
@@ -144,7 +144,7 @@ const Chat = ({ myApps }: { myApps: AppListItemType[] }) => {
        title: newTitle
      }));

      return { responseText, responseData, isNewChat: forbidLoadChat.current };
      return { responseText, isNewChat: forbidLoadChat.current };
    },
    [
      chatId,