4.8.11 fix (#2822)

* fix: tool choice history error

* fix: chat page auth error redirect

* perf: ip redirect tip

* feat: FE_DOMAIN env

* fix: tool desc empty

* feat: 4.8.11 doc
Archer committed 2024-09-27 15:52:33 +08:00 (committed by GitHub)
parent d259eda6b4
commit 98dbec2cf7
15 changed files with 135 additions and 166 deletions

View File

@@ -80,7 +80,9 @@ weight: 813
### 3. Update the image tags and restart
- Update the FastGPT image tag: v4.8.11-alpha
- Update the FastGPT commercial edition image tag: v4.8.11-alpha
- Update the FastGPT Sandbox image tag: v4.8.11-alpha
## V4.8.11 Release Notes
@@ -106,3 +108,4 @@ weight: 813
20. Fix - starting a chat with an empty chatId threw an error when the first round carried a user selection.
21. Fix - the createDataset API did not assign intro.
22. Fix - chat box rendering performance issue.
23. Fix - tool call history was stored incorrectly.

View File

@@ -16,6 +16,6 @@ export const bucketNameMap = {
}
};
export const ReadFileBaseUrl = '/api/common/file/read';
export const ReadFileBaseUrl = `${process.env.FE_DOMAIN || ''}/api/common/file/read`;
export const documentFileType = '.txt, .docx, .csv, .xlsx, .pdf, .md, .html, .pptx';

View File

@@ -289,7 +289,7 @@ export const GPTMessages2Chats = (
})
.filter((item) => item.value.length > 0);
// Merge data with the same dataId
// Merge data with the same dataId (sequential obj merging)
const result = chatMessages.reduce((result: ChatItemType[], currentItem) => {
const lastItem = result[result.length - 1];

View File

@@ -25,7 +25,7 @@ type Option = {
series: SeriesData[]; // use the defined type
};
const generateChart = (title: string, xAxis: string, yAxis: string, chartType: string) => {
const generateChart = async (title: string, xAxis: string, yAxis: string, chartType: string) => {
// @ts-ignore The DOM is unavailable here; using jsdom makes the generated image fail to render correctly. Help from anyone experienced is welcome.
const chart = echarts.init(undefined, undefined, {
renderer: 'svg', // SVG mode is required
@@ -39,8 +39,9 @@ const generateChart = (title: string, xAxis: string, yAxis: string, chartType: s
try {
parsedXAxis = JSON.parse(xAxis);
parsedYAxis = JSON.parse(yAxis);
} catch (error: unknown) {
} catch (error: any) {
console.error('解析数据时出错:', error);
return Promise.reject('Data error');
}
const option: Option = {
@@ -86,7 +87,7 @@ const generateChart = (title: string, xAxis: string, yAxis: string, chartType: s
const main = async ({ title, xAxis, yAxis, chartType }: Props): Response => {
return {
result: generateChart(title, xAxis, yAxis, chartType)
result: await generateChart(title, xAxis, yAxis, chartType)
};
};

View File

@@ -26,9 +26,7 @@
"version": "481",
"inputs": [
{
"renderTypeList": [
"reference"
],
"renderTypeList": ["reference"],
"selectedTypeIndex": 0,
"valueType": "string",
"canEdit": true,
@@ -46,9 +44,7 @@
"toolDescription": "BI图表的标题"
},
{
"renderTypeList": [
"reference"
],
"renderTypeList": ["reference"],
"selectedTypeIndex": 0,
"valueType": "string",
"canEdit": true,
@@ -57,7 +53,7 @@
"description": "x轴数据",
"defaultValue": "",
"required": true,
"toolDescription": "x轴数据",
"toolDescription": "x轴数据,例如:['A', 'B', 'C']",
"list": [
{
"label": "",
@@ -66,9 +62,7 @@
]
},
{
"renderTypeList": [
"reference"
],
"renderTypeList": ["reference"],
"selectedTypeIndex": 0,
"valueType": "string",
"canEdit": true,
@@ -83,18 +77,16 @@
}
],
"required": true,
"toolDescription": "y轴数据"
"toolDescription": "y轴数据,例如:['A', 'B', 'C']"
},
{
"renderTypeList": [
"select"
],
"renderTypeList": ["select"],
"selectedTypeIndex": 0,
"valueType": "string",
"canEdit": true,
"key": "chartType",
"label": "chartType",
"description": "图表类型,如柱状图,折线图,饼图",
"description": "图表类型柱状图,折线图,饼图",
"defaultValue": "",
"required": true,
"list": [
@@ -111,7 +103,7 @@
"value": "饼图"
}
],
"toolDescription": "图表类型,柱状图,折线图,饼图"
"toolDescription": "图表类型,目前支持三种: 柱状图,折线图,饼图"
}
],
"outputs": [
@@ -159,18 +151,13 @@
"version": "481",
"inputs": [
{
"renderTypeList": [
"reference"
],
"renderTypeList": ["reference"],
"valueType": "string",
"canEdit": true,
"key": "图片base64数据",
"label": "图片base64数据",
"description": "可用使用markdown格式展示图片![图片](base64)",
"value": [
"ws0DFKJnCPhk",
"bzaYjKyQFOw2"
]
"key": "相对路径URL",
"label": "相对路径URL",
"description": "可用使用markdown格式展示图片![图片](url)",
"value": ["ws0DFKJnCPhk", "bzaYjKyQFOw2"]
}
],
"outputs": []
@@ -190,9 +177,7 @@
"inputs": [
{
"key": "system_addInputParam",
"renderTypeList": [
"addInputParam"
],
"renderTypeList": ["addInputParam"],
"valueType": "dynamic",
"label": "",
"required": false,
@@ -223,9 +208,7 @@
},
{
"key": "system_httpMethod",
"renderTypeList": [
"custom"
],
"renderTypeList": ["custom"],
"valueType": "string",
"label": "",
"value": "POST",
@@ -237,9 +220,7 @@
},
{
"key": "system_httpTimeout",
"renderTypeList": [
"custom"
],
"renderTypeList": ["custom"],
"valueType": "number",
"label": "",
"value": 30,
@@ -253,9 +234,7 @@
},
{
"key": "system_httpReqUrl",
"renderTypeList": [
"hidden"
],
"renderTypeList": ["hidden"],
"valueType": "string",
"label": "",
"description": "新的 HTTP 请求地址。如果出现两个“请求地址”,可以删除该模块重新加入,会拉取最新的模块配置。",
@@ -268,9 +247,7 @@
},
{
"key": "system_httpHeader",
"renderTypeList": [
"custom"
],
"renderTypeList": ["custom"],
"valueType": "any",
"value": [],
"label": "",
@@ -283,9 +260,7 @@
},
{
"key": "system_httpParams",
"renderTypeList": [
"hidden"
],
"renderTypeList": ["hidden"],
"valueType": "any",
"value": [],
"label": "",
@@ -297,9 +272,7 @@
},
{
"key": "system_httpJsonBody",
"renderTypeList": [
"hidden"
],
"renderTypeList": ["hidden"],
"valueType": "any",
"value": "{\r\n \"title\": \"{{title-plugin}}\",\r\n \"xAxis\": \"{{xAxis-plugin}}\",\r\n \"yAxis\": \"{{yAxis-plugin}}\",\r\n \"chartType\": \"{{chartType-plugin}}\"\r\n}",
"label": "",
@@ -311,9 +284,7 @@
},
{
"key": "system_httpFormBody",
"renderTypeList": [
"hidden"
],
"renderTypeList": ["hidden"],
"valueType": "any",
"value": [],
"label": "",
@@ -325,9 +296,7 @@
},
{
"key": "system_httpContentType",
"renderTypeList": [
"hidden"
],
"renderTypeList": ["hidden"],
"valueType": "string",
"value": "json",
"label": "",
@@ -338,9 +307,7 @@
"toolDescription": ""
},
{
"renderTypeList": [
"reference"
],
"renderTypeList": ["reference"],
"valueType": "string",
"canEdit": true,
"key": "title-plugin",
@@ -366,15 +333,10 @@
"showDefaultValue": true
},
"required": true,
"value": [
"pluginInput",
"title"
]
"value": ["pluginInput", "title"]
},
{
"renderTypeList": [
"reference"
],
"renderTypeList": ["reference"],
"valueType": "string",
"canEdit": true,
"key": "xAxis-plugin",
@@ -400,15 +362,10 @@
"showDefaultValue": true
},
"required": true,
"value": [
"pluginInput",
"xAxis"
]
"value": ["pluginInput", "xAxis"]
},
{
"renderTypeList": [
"reference"
],
"renderTypeList": ["reference"],
"valueType": "string",
"canEdit": true,
"key": "yAxis-plugin",
@@ -434,15 +391,10 @@
"showDefaultValue": true
},
"required": true,
"value": [
"pluginInput",
"yAxis"
]
"value": ["pluginInput", "yAxis"]
},
{
"renderTypeList": [
"reference"
],
"renderTypeList": ["reference"],
"valueType": "string",
"canEdit": true,
"key": "chartType-plugin",
@@ -468,10 +420,7 @@
"showDefaultValue": true
},
"required": true,
"value": [
"pluginInput",
"chartType"
]
"value": ["pluginInput", "chartType"]
}
],
"outputs": [

View File

@@ -127,7 +127,7 @@ export const loadRequestMessages = async ({
})();
// If imgUrl is a local path, load image from local, and set url to base64
if (imgUrl.startsWith('/') || process.env.VISION_FOCUS_BASE64 === 'true') {
if (imgUrl.startsWith('/')) {
addLog.debug('Load image from local server', {
baseUrl: serverRequestBaseUrl,
requestUrl: imgUrl
@@ -234,7 +234,13 @@ export const loadRequestMessages = async ({
}
}
if (item.role === ChatCompletionRequestMessageRoleEnum.Assistant) {
if (item.content !== undefined && !item.content) return;
if (
item.content !== undefined &&
!item.content &&
!item.tool_calls &&
!item.function_call
)
return;
if (Array.isArray(item.content) && item.content.length === 0) return;
}

View File

@@ -244,33 +244,36 @@ export const runToolWithFunctionCall = async (
role: ChatCompletionRequestMessageRoleEnum.Assistant,
function_call: functionCall
};
/*
...
user
assistant: tool data
*/
const concatToolMessages = [
...requestMessages,
assistantToolMsgParams
] as ChatCompletionMessageParam[];
// Only toolCall tokens are counted here, Tool response tokens count towards the next reply
const tokens = await countGptMessagesTokens(concatToolMessages, undefined, functions);
/*
...
user
assistant: tool data
tool: tool response
*/
const completeMessages = [
...concatToolMessages,
...toolsRunResponse.map((item) => item?.functionCallMsg)
];
// console.log(tokens, 'tool');
// tool assistant
const toolAssistants = toolsRunResponse
.map((item) => {
const assistantResponses = item.toolRunResponse.assistantResponses || [];
return assistantResponses;
})
.flat();
// tool node assistant
const adaptChatMessages = GPTMessages2Chats(completeMessages);
const toolNodeAssistant = adaptChatMessages.pop() as AIChatItemType;
const toolNodeAssistant = GPTMessages2Chats([
assistantToolMsgParams,
...toolsRunResponse.map((item) => item?.functionCallMsg)
])[0] as AIChatItemType;
const toolNodeAssistants = [
...assistantResponses,
...toolAssistants,
...toolNodeAssistant.value
];
const toolNodeAssistants = [...assistantResponses, ...toolNodeAssistant.value];
// concat tool responses
const dispatchFlowResponse = response
@@ -285,7 +288,7 @@ export const runToolWithFunctionCall = async (
return {
dispatchFlowResponse,
totalTokens: response?.totalTokens ? response.totalTokens + tokens : tokens,
completeMessages: filterMessages,
completeMessages,
assistantResponses: toolNodeAssistants,
runTimes:
(response?.runTimes || 0) +

View File

@@ -280,27 +280,37 @@ export const runToolWithPromptCall = async (
role: ChatCompletionRequestMessageRoleEnum.Assistant,
function_call: toolJson
};
/*
...
user
assistant: tool data
*/
const concatToolMessages = [
...requestMessages,
assistantToolMsgParams
] as ChatCompletionMessageParam[];
// Only toolCall tokens are counted here, Tool response tokens count towards the next reply
const tokens = await countGptMessagesTokens(concatToolMessages, undefined);
const completeMessages: ChatCompletionMessageParam[] = [
...concatToolMessages,
{
role: ChatCompletionRequestMessageRoleEnum.Function,
name: toolJson.name,
content: toolsRunResponse.toolResponsePrompt
}
];
// tool assistant
const toolAssistants = toolsRunResponse.moduleRunResponse.assistantResponses || [];
/*
...
user
assistant: tool data
function: tool response
*/
const functionResponseMessage: ChatCompletionMessageParam = {
role: ChatCompletionRequestMessageRoleEnum.Function,
name: toolJson.name,
content: toolsRunResponse.toolResponsePrompt
};
// tool node assistant
const adaptChatMessages = GPTMessages2Chats(completeMessages);
const toolNodeAssistant = adaptChatMessages.pop() as AIChatItemType;
const toolNodeAssistants = [...assistantResponses, ...toolAssistants, ...toolNodeAssistant.value];
const toolNodeAssistant = GPTMessages2Chats([
assistantToolMsgParams,
functionResponseMessage
])[0] as AIChatItemType;
const toolNodeAssistants = [...assistantResponses, ...toolNodeAssistant.value];
const dispatchFlowResponse = response
? response.dispatchFlowResponse.concat(toolsRunResponse.moduleRunResponse)

View File

@@ -6,7 +6,6 @@ import {
ChatCompletionMessageToolCall,
StreamChatType,
ChatCompletionToolMessageParam,
ChatCompletionAssistantToolParam,
ChatCompletionMessageParam,
ChatCompletionTool,
ChatCompletionAssistantMessageParam
@@ -54,7 +53,6 @@ export const runToolWithToolChoice = async (
res,
requestOrigin,
runtimeNodes,
node,
stream,
workflowStreamResponse,
params: { temperature = 0, maxToken = 4000, aiChatVision }
@@ -86,7 +84,7 @@ export const runToolWithToolChoice = async (
type: 'function',
function: {
name: item.nodeId,
description: item.intro,
description: item.intro || item.name,
parameters: {
type: 'object',
properties,
@@ -282,12 +280,24 @@ export const runToolWithToolChoice = async (
).filter(Boolean) as ToolRunResponseType;
const flatToolsResponseData = toolsRunResponse.map((item) => item.toolRunResponse).flat();
if (toolCalls.length > 0 && !res?.closed) {
// Run the tool, combine its results, and perform another round of AI calls
const assistantToolMsgParams: ChatCompletionAssistantToolParam = {
role: ChatCompletionRequestMessageRoleEnum.Assistant,
tool_calls: toolCalls
};
const assistantToolMsgParams: ChatCompletionAssistantMessageParam[] = [
...(answer
? [
{
role: ChatCompletionRequestMessageRoleEnum.Assistant as 'assistant',
content: answer
}
]
: []),
{
role: ChatCompletionRequestMessageRoleEnum.Assistant,
tool_calls: toolCalls
}
];
/*
...
user
@@ -295,8 +305,10 @@ export const runToolWithToolChoice = async (
*/
const concatToolMessages = [
...requestMessages,
assistantToolMsgParams
...assistantToolMsgParams
] as ChatCompletionMessageParam[];
// Only toolCall tokens are counted here, Tool response tokens count towards the next reply
const tokens = await countGptMessagesTokens(concatToolMessages, tools);
/*
...
@@ -309,25 +321,12 @@ export const runToolWithToolChoice = async (
...toolsRunResponse.map((item) => item?.toolMsgParams)
];
// console.log(tokens, 'tool');
// tool assistant
const toolAssistants = toolsRunResponse
.map((item) => {
const assistantResponses = item.toolRunResponse.assistantResponses || [];
return assistantResponses;
})
.flat();
// tool node assistant
const adaptChatMessages = GPTMessages2Chats(completeMessages);
const toolNodeAssistant = adaptChatMessages.pop() as AIChatItemType;
const toolNodeAssistants = [
...assistantResponses,
...toolAssistants,
...toolNodeAssistant.value
];
// Assistant tool response adapt to chatStore
const toolNodeAssistant = GPTMessages2Chats([
...assistantToolMsgParams,
...toolsRunResponse.map((item) => item?.toolMsgParams)
])[0] as AIChatItemType;
const toolNodeAssistants = [...assistantResponses, ...toolNodeAssistant.value];
// concat tool responses
const dispatchFlowResponse = response
@@ -373,7 +372,6 @@ export const runToolWithToolChoice = async (
};
const completeMessages = filterMessages.concat(gptAssistantResponse);
const tokens = await countGptMessagesTokens(completeMessages, tools);
// console.log(tokens, 'response token');
// concat tool assistant
const toolNodeAssistant = GPTMessages2Chats([gptAssistantResponse])[0] as AIChatItemType;

View File

@@ -91,8 +91,9 @@ export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPlugi
: {}),
runningAppInfo: {
id: String(plugin.id),
teamId: plugin.teamId || '',
tmbId: pluginData?.tmbId || ''
// For system plugins, use the current team's teamId and tmbId
teamId: plugin.teamId || runningAppInfo.teamId,
tmbId: pluginData?.tmbId || runningAppInfo.tmbId
},
variables: runtimeVariables,
query: getPluginRunUserQuery({

View File

@@ -30,8 +30,8 @@ MILVUS_TOKEN=133964348b00b4b4e4b51bef680a61350950385c8c64a3ec16b1ab92d3c67dcc4e0
SANDBOX_URL=http://localhost:3001
# Commercial edition URL
PRO_URL=
# Home page path
HOME_URL=/
# The site's address, used to automatically complete the domain of relative-path resources
# FE_DOMAIN=http://localhost:3000
# Log level: debug, info, warn, error
LOG_LEVEL=debug
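
For context, a minimal sketch of what the new FE_DOMAIN variable does (illustrative only, not part of the commit; the withFeDomain helper is hypothetical): when the variable is set, relative resource paths such as ReadFileBaseUrl are prefixed with it, otherwise they stay relative and resolve against the current origin.

```ts
// Illustrative sketch, not part of the commit: the withFeDomain helper is hypothetical.
// When FE_DOMAIN is set, relative resource paths are prefixed with it;
// when it is empty, the path is returned unchanged and resolves against the current origin.
const FE_DOMAIN = process.env.FE_DOMAIN || '';

export const withFeDomain = (relativePath: string): string => `${FE_DOMAIN}${relativePath}`;

// FE_DOMAIN unset                  -> '/api/common/file/read'
// FE_DOMAIN=http://localhost:3000  -> 'http://localhost:3000/api/common/file/read'
console.log(withFeDomain('/api/common/file/read'));
```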

View File

@@ -34,6 +34,7 @@ import { useChat } from '@/components/core/chat/ChatContainer/useChat';
import ChatBox from '@/components/core/chat/ChatContainer/ChatBox';
import { useSystem } from '@fastgpt/web/hooks/useSystem';
import { InitChatResponse } from '@/global/core/chat/api';
import { AppErrEnum } from '@fastgpt/global/common/error/code/app';
const CustomPluginRunBox = dynamic(() => import('./components/CustomPluginRunBox'));
@@ -111,8 +112,14 @@ const Chat = ({
// reset all chat store
if (e?.code === 501) {
router.replace('/app/list');
} else if (chatId) {
onChangeChatId();
} else {
router.replace({
query: {
...router.query,
appId: myApps[0]?._id,
chatId: ''
}
});
}
},
onFinally() {

View File

@@ -210,9 +210,8 @@ const OutLink = ({
}
},
onError(e: any) {
console.log(e);
if (chatId) {
onChangeChatId();
onChangeChatId('');
}
},
onFinally() {

View File

@@ -11,14 +11,10 @@ import ChatHistorySlider from './components/ChatHistorySlider';
import ChatHeader from './components/ChatHeader';
import { serviceSideProps } from '@/web/common/utils/i18n';
import { useTranslation } from 'next-i18next';
import { customAlphabet } from 'nanoid';
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 12);
import ChatBox from '@/components/core/chat/ChatContainer/ChatBox';
import type { StartChatFnProps } from '@/components/core/chat/ChatContainer/type';
import { streamFetch } from '@/web/common/api/fetch';
import { getChatTitleFromChatMessage } from '@fastgpt/global/core/chat/utils';
import { ChatStatusEnum } from '@fastgpt/global/core/chat/constants';
import { getErrText } from '@fastgpt/global/common/error/utils';
import SliderApps from './components/SliderApps';
import { GPTMessages2Chats } from '@fastgpt/global/core/chat/adapt';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
@@ -164,12 +160,9 @@ const Chat = ({ myApps }: { myApps: AppListItemType[] }) => {
manual: false,
refreshDeps: [teamId, teamToken, appId, chatId],
onError(e: any) {
toast({
title: getErrText(e, t('common:core.chat.Failed to initialize chat')),
status: 'error'
});
console.log(e);
if (chatId) {
onChangeChatId();
onChangeChatId('');
}
},
onFinally() {

View File

@@ -88,10 +88,9 @@ const Login = ({ ChineseRedirectUrl }: { ChineseRedirectUrl: string }) => {
router.prefetch('/app/list');
});
const [showRedirect, setShowRedirect] = useLocalStorageState<boolean>('showRedirect', {
const [showRedirect, setShowRedirect] = useLocalStorageState<boolean>('chinese_ip_redirect', {
defaultValue: true
});
const checkIpInChina = useCallback(() => {
const onSuccess = (res: any) => {
if (!res.country.iso_code) {