diff --git a/docSite/content/docs/development/configuration.md b/docSite/content/docs/development/configuration.md index 60eafc8ed..7dfdcd143 100644 --- a/docSite/content/docs/development/configuration.md +++ b/docSite/content/docs/development/configuration.md @@ -20,14 +20,13 @@ llm模型全部合并 ```json { "systemEnv": { - "openapiPrefix": "fastgpt", "vectorMaxProcess": 15, "qaMaxProcess": 15, - "pgHNSWEfSearch": 100 + "pgHNSWEfSearch": 100 // 向量搜索参数。越大,搜索越精确,但是速度越慢。设置为100,有99%+精度。 }, "llmModels": [ { - "model": "gpt-3.5-turbo-1106", // 模型名 + "model": "gpt-3.5-turbo", // 模型名 "name": "gpt-3.5-turbo", // 别名 "maxContext": 16000, // 最大上下文 "maxResponse": 4000, // 最大回复 @@ -37,12 +36,16 @@ llm模型全部合并 "censor": false, "vision": false, // 是否支持图片输入 "datasetProcess": false, // 是否设置为知识库处理模型(QA),务必保证至少有一个为true,否则知识库会报错 - "toolChoice": true, // 是否支持工具选择 - "functionCall": false, // 是否支持函数调用 + "usedInClassify": true, // 是否用于问题分类(务必保证至少有一个为true) + "usedInExtractFields": true, // 是否用于内容提取(务必保证至少有一个为true) + "usedInToolCall": true, // 是否用于工具调用(务必保证至少有一个为true) + "usedInQueryExtension": true, // 是否用于问题优化(务必保证至少有一个为true) + "toolChoice": true, // 是否支持工具选择(务必保证至少有一个为true) + "functionCall": false, // 是否支持函数调用(特殊功能,会优先使用 toolChoice,如果为false,则使用 functionCall,如果仍为 false,则使用提示词模式) "customCQPrompt": "", // 自定义文本分类提示词(不支持工具和函数调用的模型 "customExtractPrompt": "", // 自定义内容提取提示词 "defaultSystemChatPrompt": "", // 对话默认携带的系统提示词 - "defaultConfig":{} // 对话默认配置(比如 GLM4 的 top_p + "defaultConfig":{} // LLM默认配置,可以针对不同模型设置特殊值(比如 GLM4 的 top_p }, { "model": "gpt-3.5-turbo-16k", @@ -55,6 +58,10 @@ llm模型全部合并 "censor": false, "vision": false, "datasetProcess": true, + "usedInClassify": true, + "usedInExtractFields": true, + "usedInToolCall": true, + "usedInQueryExtension": true, "toolChoice": true, "functionCall": false, "customCQPrompt": "", @@ -73,6 +80,10 @@ llm模型全部合并 "censor": false, "vision": false, "datasetProcess": false, + "usedInClassify": true, + "usedInExtractFields": true, + "usedInToolCall": true, + 
"usedInQueryExtension": true, "toolChoice": true, "functionCall": false, "customCQPrompt": "", @@ -91,6 +102,10 @@ llm模型全部合并 "censor": false, "vision": true, "datasetProcess": false, + "usedInClassify": false, + "usedInExtractFields": false, + "usedInToolCall": false, + "usedInQueryExtension": false, "toolChoice": true, "functionCall": false, "customCQPrompt": "", diff --git a/docSite/content/docs/development/one-api.md b/docSite/content/docs/development/one-api.md index ce597d432..d604c885f 100644 --- a/docSite/content/docs/development/one-api.md +++ b/docSite/content/docs/development/one-api.md @@ -120,6 +120,10 @@ CHAT_API_KEY=sk-xxxxxx "censor": false, "vision": false, // 是否支持图片输入 "datasetProcess": false, // 是否设置为知识库处理模型 + "usedInClassify": true, // 是否用于问题分类 + "usedInExtractFields": true, // 是否用于字段提取 + "usedInToolCall": true, // 是否用于工具调用 + "usedInQueryExtension": true, // 是否用于问题优化 "toolChoice": true, // 是否支持工具选择 "functionCall": false, // 是否支持函数调用 "customCQPrompt": "", // 自定义文本分类提示词(不支持工具和函数调用的模型 diff --git a/docSite/content/docs/development/upgrading/47.md b/docSite/content/docs/development/upgrading/47.md new file mode 100644 index 000000000..512b669d8 --- /dev/null +++ b/docSite/content/docs/development/upgrading/47.md @@ -0,0 +1,19 @@ +--- +title: 'V4.7(进行中)' +description: 'FastGPT V4.7更新说明' +icon: 'upgrade' +draft: false +toc: true +weight: 826 +--- + +## 修改配置文件 + +增加一些 Boolean 值,用于决定不同功能块可以使用哪些模型:[点击查看最新的配置文件](/docs/development/configuration/) + + +## V4.7 更新说明 + +1. 新增 - 工具调用模块,可以让LLM模型根据用户意图,动态的选择其他模型或插件执行。 +2. 新增 - 分类和内容提取支持 functionCall 模式。部分模型支持 functionCall 不支持 ToolCall,也可以使用了。需要把 LLM 模型配置文件里的 `functionCall` 设置为 `true`, `toolChoice`设置为 `false`。如果 `toolChoice` 为 true,会走 tool 模式。 +3. 
优化 - 高级编排性能 diff --git a/package.json b/package.json index 573b60405..4bce5cf08 100644 --- a/package.json +++ b/package.json @@ -6,7 +6,7 @@ "prepare": "husky install", "format-code": "prettier --config \"./.prettierrc.js\" --write \"./**/src/**/*.{ts,tsx,scss}\"", "format-doc": "zhlint --dir ./docSite *.md --fix", - "gen:theme-typings": "chakra-cli tokens projects/app/src/web/styles/theme.ts --out node_modules/.pnpm/node_modules/@chakra-ui/styled-system/dist/theming.types.d.ts", + "gen:theme-typings": "chakra-cli tokens packages/web/styles/theme.ts --out node_modules/.pnpm/node_modules/@chakra-ui/styled-system/dist/theming.types.d.ts", "postinstall": "sh ./scripts/postinstall.sh", "initIcon": "node ./scripts/icon/init.js", "previewIcon": "node ./scripts/icon/index.js" diff --git a/packages/global/common/string/tiktoken/index.ts b/packages/global/common/string/tiktoken/index.ts index ef1e106da..9b8f1d40c 100644 --- a/packages/global/common/string/tiktoken/index.ts +++ b/packages/global/common/string/tiktoken/index.ts @@ -1,10 +1,15 @@ /* Only the token of gpt-3.5-turbo is used */ import type { ChatItemType } from '../../../core/chat/type'; import { Tiktoken } from 'js-tiktoken/lite'; -import { adaptChat2GptMessages } from '../../../core/chat/adapt'; -import { ChatCompletionRequestMessageRoleEnum } from '../../../core/ai/constant'; +import { chats2GPTMessages } from '../../../core/chat/adapt'; import encodingJson from './cl100k_base.json'; -import { ChatMessageItemType } from '../../../core/ai/type'; +import { + ChatCompletionMessageParam, + ChatCompletionContentPart, + ChatCompletionCreateParams, + ChatCompletionTool +} from '../../../core/ai/type'; +import { ChatCompletionRequestMessageRoleEnum } from '../../../core/ai/constants'; /* init tikToken obj */ export function getTikTokenEnc() { @@ -29,18 +34,25 @@ export function getTikTokenEnc() { /* count one prompt tokens */ export function countPromptTokens( - prompt = '', - role: '' | 
`${ChatCompletionRequestMessageRoleEnum}` = '', - tools?: any + prompt: string | ChatCompletionContentPart[] | null | undefined = '', + role: '' | `${ChatCompletionRequestMessageRoleEnum}` = '' ) { const enc = getTikTokenEnc(); - const toolText = tools - ? JSON.stringify(tools) - .replace('"', '') - .replace('\n', '') - .replace(/( ){2,}/g, ' ') - : ''; - const text = `${role}\n${prompt}\n${toolText}`.trim(); + const promptText = (() => { + if (!prompt) return ''; + if (typeof prompt === 'string') return prompt; + let promptText = ''; + prompt.forEach((item) => { + if (item.type === 'text') { + promptText += item.text; + } else if (item.type === 'image_url') { + promptText += item.image_url.url; + } + }); + return promptText; + })(); + + const text = `${role}\n${promptText}`.trim(); try { const encodeText = enc.encode(text); @@ -50,15 +62,66 @@ export function countPromptTokens( return text.length; } } +export const countToolsTokens = ( + tools?: ChatCompletionTool[] | ChatCompletionCreateParams.Function[] +) => { + if (!tools || tools.length === 0) return 0; + + const enc = getTikTokenEnc(); + + const toolText = tools + ? 
JSON.stringify(tools) + .replace('"', '') + .replace('\n', '') + .replace(/( ){2,}/g, ' ') + : ''; + + return enc.encode(toolText).length; +}; /* count messages tokens */ -export const countMessagesTokens = (messages: ChatItemType[], tools?: any) => { - const adaptMessages = adaptChat2GptMessages({ messages, reserveId: true }); +export const countMessagesTokens = (messages: ChatItemType[]) => { + const adaptMessages = chats2GPTMessages({ messages, reserveId: true }); - return countGptMessagesTokens(adaptMessages, tools); + return countGptMessagesTokens(adaptMessages); }; -export const countGptMessagesTokens = (messages: ChatMessageItemType[], tools?: any) => - messages.reduce((sum, item) => sum + countPromptTokens(item.content, item.role, tools), 0); +export const countGptMessagesTokens = ( + messages: ChatCompletionMessageParam[], + tools?: ChatCompletionTool[], + functionCall?: ChatCompletionCreateParams.Function[] +) => + messages.reduce((sum, item) => { + // Evaluates the text of toolcall and functioncall + const functionCallPrompt = (() => { + let prompt = ''; + if (item.role === ChatCompletionRequestMessageRoleEnum.Assistant) { + const toolCalls = item.tool_calls; + prompt += + toolCalls + ?.map((item) => `${item?.function?.name} ${item?.function?.arguments}`.trim()) + ?.join('') || ''; + + const functionCall = item.function_call; + prompt += `${functionCall?.name} ${functionCall?.arguments}`.trim(); + } + return prompt; + })(); + + const contentPrompt = (() => { + if (!item.content) return ''; + if (typeof item.content === 'string') return item.content; + return item.content + .map((item) => { + if (item.type === 'text') return item.text; + return ''; + }) + .join(''); + })(); + + return sum + countPromptTokens(`${contentPrompt}${functionCallPrompt}`, item.role); + }, 0) + + countToolsTokens(tools) + + countToolsTokens(functionCall); /* slice messages from top to bottom by maxTokens */ export function sliceMessagesTB({ @@ -68,7 +131,7 @@ export function 
sliceMessagesTB({ messages: ChatItemType[]; maxTokens: number; }) { - const adaptMessages = adaptChat2GptMessages({ messages, reserveId: true }); + const adaptMessages = chats2GPTMessages({ messages, reserveId: true }); let reduceTokens = maxTokens; let result: ChatItemType[] = []; diff --git a/packages/global/core/ai/constant.ts b/packages/global/core/ai/constant.ts deleted file mode 100644 index 9e44d3ccc..000000000 --- a/packages/global/core/ai/constant.ts +++ /dev/null @@ -1,7 +0,0 @@ -export enum ChatCompletionRequestMessageRoleEnum { - 'System' = 'system', - 'User' = 'user', - 'Assistant' = 'assistant', - 'Function' = 'function', - 'Tool' = 'tool' -} diff --git a/packages/global/core/ai/constants.ts b/packages/global/core/ai/constants.ts new file mode 100644 index 000000000..a697841dc --- /dev/null +++ b/packages/global/core/ai/constants.ts @@ -0,0 +1,27 @@ +export enum ChatCompletionRequestMessageRoleEnum { + 'System' = 'system', + 'User' = 'user', + 'Assistant' = 'assistant', + 'Function' = 'function', + 'Tool' = 'tool' +} + +export enum ChatMessageTypeEnum { + text = 'text', + image_url = 'image_url' +} + +export enum LLMModelTypeEnum { + all = 'all', + classify = 'classify', + extractFields = 'extractFields', + toolCall = 'toolCall', + queryExtension = 'queryExtension' +} +export const llmModelTypeFilterMap = { + [LLMModelTypeEnum.all]: 'model', + [LLMModelTypeEnum.classify]: 'usedInClassify', + [LLMModelTypeEnum.extractFields]: 'usedInExtractFields', + [LLMModelTypeEnum.toolCall]: 'usedInToolCall', + [LLMModelTypeEnum.queryExtension]: 'usedInQueryExtension' +}; diff --git a/packages/global/core/ai/model.d.ts b/packages/global/core/ai/model.d.ts index e5b783a92..3d9efaa28 100644 --- a/packages/global/core/ai/model.d.ts +++ b/packages/global/core/ai/model.d.ts @@ -10,7 +10,13 @@ export type LLMModelItemType = { censor?: boolean; vision?: boolean; - datasetProcess?: boolean; + + // diff function model + datasetProcess?: boolean; // dataset + 
usedInClassify?: boolean; // classify + usedInExtractFields?: boolean; // extract fields + usedInToolCall?: boolean; // tool call + usedInQueryExtension?: boolean; // query extension functionCall: boolean; toolChoice: boolean; diff --git a/packages/global/core/ai/type.d.ts b/packages/global/core/ai/type.d.ts index 1aaefb121..6fa1051f9 100644 --- a/packages/global/core/ai/type.d.ts +++ b/packages/global/core/ai/type.d.ts @@ -1,20 +1,33 @@ +import openai from 'openai'; import type { - ChatCompletion, - ChatCompletionCreateParams, + ChatCompletionMessageToolCall, ChatCompletionChunk, ChatCompletionMessageParam, - ChatCompletionContentPart + ChatCompletionToolMessageParam, + ChatCompletionAssistantMessageParam } from 'openai/resources'; +import { ChatMessageTypeEnum } from './constants'; -export type ChatCompletionContentPart = ChatCompletionContentPart; -export type ChatCompletionCreateParams = ChatCompletionCreateParams; -export type ChatMessageItemType = Omit & { - name?: any; +export * from 'openai/resources'; + +export type ChatCompletionMessageParam = ChatCompletionMessageParam & { dataId?: string; - content: any; -} & any; +}; +export type ChatCompletionToolMessageParam = ChatCompletionToolMessageParam & { name: string }; +export type ChatCompletionAssistantToolParam = { + role: 'assistant'; + tool_calls: ChatCompletionMessageToolCall[]; +}; -export type ChatCompletion = ChatCompletion; +export type ChatCompletionMessageToolCall = ChatCompletionMessageToolCall & { + toolName?: string; + toolAvatar?: string; +}; +export type ChatCompletionMessageFunctionCall = ChatCompletionAssistantMessageParam.FunctionCall & { + id?: string; + toolName?: string; + toolAvatar?: string; +}; export type StreamChatType = Stream; export type PromptTemplateItem = { @@ -22,3 +35,6 @@ export type PromptTemplateItem = { desc: string; value: string; }; + +export default openai; +export * from 'openai'; diff --git a/packages/global/core/chat/adapt.ts b/packages/global/core/chat/adapt.ts 
index c63417bcd..31320e9d4 100644 --- a/packages/global/core/chat/adapt.ts +++ b/packages/global/core/chat/adapt.ts @@ -1,40 +1,298 @@ -import type { ChatItemType } from '../../core/chat/type.d'; -import { ChatRoleEnum } from '../../core/chat/constants'; -import { ChatCompletionRequestMessageRoleEnum } from '../../core/ai/constant'; -import type { ChatMessageItemType } from '../../core/ai/type.d'; +import type { + ChatItemType, + ChatItemValueItemType, + RuntimeUserPromptType, + UserChatItemType +} from '../../core/chat/type.d'; +import { ChatFileTypeEnum, ChatItemValueTypeEnum, ChatRoleEnum } from '../../core/chat/constants'; +import type { + ChatCompletionContentPart, + ChatCompletionFunctionMessageParam, + ChatCompletionMessageFunctionCall, + ChatCompletionMessageParam, + ChatCompletionMessageToolCall, + ChatCompletionToolMessageParam +} from '../../core/ai/type.d'; +import { ChatCompletionRequestMessageRoleEnum } from '../../core/ai/constants'; -const chat2Message = { - [ChatRoleEnum.AI]: ChatCompletionRequestMessageRoleEnum.Assistant, - [ChatRoleEnum.Human]: ChatCompletionRequestMessageRoleEnum.User, - [ChatRoleEnum.System]: ChatCompletionRequestMessageRoleEnum.System, - [ChatRoleEnum.Function]: ChatCompletionRequestMessageRoleEnum.Function, - [ChatRoleEnum.Tool]: ChatCompletionRequestMessageRoleEnum.Tool -}; -const message2Chat = { +const GPT2Chat = { [ChatCompletionRequestMessageRoleEnum.System]: ChatRoleEnum.System, [ChatCompletionRequestMessageRoleEnum.User]: ChatRoleEnum.Human, [ChatCompletionRequestMessageRoleEnum.Assistant]: ChatRoleEnum.AI, - [ChatCompletionRequestMessageRoleEnum.Function]: ChatRoleEnum.Function, - [ChatCompletionRequestMessageRoleEnum.Tool]: ChatRoleEnum.Tool + [ChatCompletionRequestMessageRoleEnum.Function]: ChatRoleEnum.AI, + [ChatCompletionRequestMessageRoleEnum.Tool]: ChatRoleEnum.AI }; -export function adaptRole_Chat2Message(role: `${ChatRoleEnum}`) { - return chat2Message[role]; -} export function adaptRole_Message2Chat(role: 
`${ChatCompletionRequestMessageRoleEnum}`) { - return message2Chat[role]; + return GPT2Chat[role]; } -export const adaptChat2GptMessages = ({ +export const simpleUserContentPart = (content: ChatCompletionContentPart[]) => { + if (content.length === 1 && content[0].type === 'text') { + return content[0].text; + } + return content; +}; + +export const chats2GPTMessages = ({ messages, - reserveId + reserveId, + reserveTool = false }: { messages: ChatItemType[]; reserveId: boolean; -}): ChatMessageItemType[] => { - return messages.map((item) => ({ - ...(reserveId && { dataId: item.dataId }), - role: chat2Message[item.obj], - content: item.value || '' - })); + reserveTool?: boolean; +}): ChatCompletionMessageParam[] => { + let results: ChatCompletionMessageParam[] = []; + + messages.forEach((item) => { + const dataId = reserveId ? item.dataId : undefined; + if (item.obj === ChatRoleEnum.Human) { + const value = item.value + .map((item) => { + if (item.type === ChatItemValueTypeEnum.text) { + return { + type: 'text', + text: item.text?.content || '' + }; + } + if (item.type === 'file' && item.file?.type === ChatFileTypeEnum.image) { + return { + type: 'image_url', + image_url: { + url: item.file?.url || '' + } + }; + } + return; + }) + .filter(Boolean) as ChatCompletionContentPart[]; + + results.push({ + dataId, + role: ChatCompletionRequestMessageRoleEnum.User, + content: simpleUserContentPart(value) + }); + } else if (item.obj === ChatRoleEnum.System) { + const content = item.value?.[0]?.text?.content; + if (content) { + results.push({ + dataId, + role: ChatCompletionRequestMessageRoleEnum.System, + content + }); + } + } else { + item.value.forEach((value) => { + if (value.type === ChatItemValueTypeEnum.tool && value.tools && reserveTool) { + const tool_calls: ChatCompletionMessageToolCall[] = []; + const toolResponse: ChatCompletionToolMessageParam[] = []; + value.tools.forEach((tool) => { + tool_calls.push({ + id: tool.id, + type: 'function', + function: { + name: 
tool.functionName, + arguments: tool.params + } + }); + toolResponse.push({ + tool_call_id: tool.id, + role: ChatCompletionRequestMessageRoleEnum.Tool, + name: tool.functionName, + content: tool.response + }); + }); + results = results + .concat({ + dataId, + role: ChatCompletionRequestMessageRoleEnum.Assistant, + tool_calls + }) + .concat(toolResponse); + } else if (value.text) { + results.push({ + dataId, + role: ChatCompletionRequestMessageRoleEnum.Assistant, + content: value.text.content + }); + } + }); + } + }); + + return results; +}; +export const GPTMessages2Chats = ( + messages: ChatCompletionMessageParam[], + reserveTool = true +): ChatItemType[] => { + return messages + .map((item) => { + const value: ChatItemType['value'] = []; + const obj = GPT2Chat[item.role]; + + if ( + obj === ChatRoleEnum.System && + item.role === ChatCompletionRequestMessageRoleEnum.System + ) { + value.push({ + type: ChatItemValueTypeEnum.text, + text: { + content: item.content + } + }); + } else if ( + obj === ChatRoleEnum.Human && + item.role === ChatCompletionRequestMessageRoleEnum.User + ) { + if (typeof item.content === 'string') { + value.push({ + type: ChatItemValueTypeEnum.text, + text: { + content: item.content + } + }); + } else if (Array.isArray(item.content)) { + item.content.forEach((item) => { + if (item.type === 'text') { + value.push({ + type: ChatItemValueTypeEnum.text, + text: { + content: item.text + } + }); + } else if (item.type === 'image_url') { + value.push({ + //@ts-ignore + type: 'file', + file: { + type: ChatFileTypeEnum.image, + name: '', + url: item.image_url.url + } + }); + } + }); + // @ts-ignore + } + } else if ( + obj === ChatRoleEnum.AI && + item.role === ChatCompletionRequestMessageRoleEnum.Assistant + ) { + if (item.content && typeof item.content === 'string') { + value.push({ + type: ChatItemValueTypeEnum.text, + text: { + content: item.content + } + }); + } else if (item.tool_calls && reserveTool) { + // save tool calls + const toolCalls = 
item.tool_calls as ChatCompletionMessageToolCall[]; + value.push({ + //@ts-ignore + type: ChatItemValueTypeEnum.tool, + tools: toolCalls.map((tool) => { + let toolResponse = + messages.find( + (msg) => + msg.role === ChatCompletionRequestMessageRoleEnum.Tool && + msg.tool_call_id === tool.id + )?.content || ''; + toolResponse = + typeof toolResponse === 'string' ? toolResponse : JSON.stringify(toolResponse); + + return { + id: tool.id, + toolName: tool.toolName || '', + toolAvatar: tool.toolAvatar || '', + functionName: tool.function.name, + params: tool.function.arguments, + response: toolResponse as string + }; + }) + }); + } else if (item.function_call && reserveTool) { + const functionCall = item.function_call as ChatCompletionMessageFunctionCall; + const functionResponse = messages.find( + (msg) => + msg.role === ChatCompletionRequestMessageRoleEnum.Function && + msg.name === item.function_call?.name + ) as ChatCompletionFunctionMessageParam; + + if (functionResponse) { + value.push({ + //@ts-ignore + type: ChatItemValueTypeEnum.tool, + tools: [ + { + id: functionCall.id || '', + toolName: functionCall.toolName || '', + toolAvatar: functionCall.toolAvatar || '', + functionName: functionCall.name, + params: functionCall.arguments, + response: functionResponse.content || '' + } + ] + }); + } + } + } + + return { + dataId: item.dataId, + obj, + value + } as ChatItemType; + }) + .filter((item) => item.value.length > 0); +}; + +export const chatValue2RuntimePrompt = (value: ChatItemValueItemType[]): RuntimeUserPromptType => { + const prompt: RuntimeUserPromptType = { + files: [], + text: '' + }; + value.forEach((item) => { + if (item.type === 'file' && item.file) { + prompt.files?.push(item.file); + } else if (item.text) { + prompt.text += item.text.content; + } + }); + return prompt; +}; + +export const runtimePrompt2ChatsValue = ( + prompt: RuntimeUserPromptType +): UserChatItemType['value'] => { + const value: UserChatItemType['value'] = []; + if (prompt.files) 
{ + prompt.files.forEach((file) => { + value.push({ + type: ChatItemValueTypeEnum.file, + file + }); + }); + } + if (prompt.text) { + value.push({ + type: ChatItemValueTypeEnum.text, + text: { + content: prompt.text + } + }); + } + return value; +}; + +export const getSystemPrompt = (prompt?: string): ChatItemType[] => { + if (!prompt) return []; + return [ + { + obj: ChatRoleEnum.System, + value: [{ type: ChatItemValueTypeEnum.text, text: { content: prompt } }] + } + ]; }; diff --git a/packages/global/core/chat/constants.ts b/packages/global/core/chat/constants.ts index a43b62387..ae9a78695 100644 --- a/packages/global/core/chat/constants.ts +++ b/packages/global/core/chat/constants.ts @@ -1,28 +1,30 @@ export enum ChatRoleEnum { System = 'System', Human = 'Human', - AI = 'AI', - Function = 'Function', - Tool = 'Tool' + AI = 'AI' } export const ChatRoleMap = { [ChatRoleEnum.System]: { - name: '系统提示词' + name: '系统' }, [ChatRoleEnum.Human]: { name: '用户' }, [ChatRoleEnum.AI]: { name: 'AI' - }, - [ChatRoleEnum.Function]: { - name: 'Function' - }, - [ChatRoleEnum.Tool]: { - name: 'Tool' } }; +export enum ChatFileTypeEnum { + image = 'image', + file = 'file' +} +export enum ChatItemValueTypeEnum { + text = 'text', + file = 'file', + tool = 'tool' +} + export enum ChatSourceEnum { test = 'test', online = 'online', diff --git a/packages/global/core/chat/type.d.ts b/packages/global/core/chat/type.d.ts index 2f7001a6b..211daef3e 100644 --- a/packages/global/core/chat/type.d.ts +++ b/packages/global/core/chat/type.d.ts @@ -1,11 +1,20 @@ import { ClassifyQuestionAgentItemType } from '../module/type'; import { SearchDataResponseItemType } from '../dataset/type'; -import { ChatRoleEnum, ChatSourceEnum, ChatStatusEnum } from './constants'; +import { + ChatFileTypeEnum, + ChatItemValueTypeEnum, + ChatRoleEnum, + ChatSourceEnum, + ChatStatusEnum +} from './constants'; import { FlowNodeTypeEnum } from '../module/node/constant'; import { ModuleOutputKeyEnum } from 
'../module/constants'; +import { DispatchNodeResponseKeyEnum } from '../module/runtime/constants'; import { AppSchema } from '../app/type'; import type { AppSchema as AppType } from '@fastgpt/global/core/app/type.d'; import { DatasetSearchModeEnum } from '../dataset/constants'; +import { ChatBoxInputType } from '../../../../projects/app/src/components/ChatBox/type'; +import { DispatchNodeResponseType } from '../module/runtime/type.d'; export type ChatSchema = { _id: string; @@ -30,7 +39,53 @@ export type ChatWithAppSchema = Omit & { appId: AppSchema; }; -export type ChatItemSchema = { +export type UserChatItemValueItemType = { + type: ChatItemValueTypeEnum.text | ChatItemValueTypeEnum.file; + text?: { + content: string; + }; + file?: { + type: `${ChatFileTypeEnum}`; + name?: string; + url: string; + }; +}; +export type UserChatItemType = { + obj: ChatRoleEnum.Human; + value: UserChatItemValueItemType[]; +}; +export type SystemChatItemValueItemType = { + type: ChatItemValueTypeEnum.text; + text?: { + content: string; + }; +}; +export type SystemChatItemType = { + obj: ChatRoleEnum.System; + value: SystemChatItemValueItemType[]; +}; +export type AIChatItemValueItemType = { + type: ChatItemValueTypeEnum.text | ChatItemValueTypeEnum.tool; + text?: { + content: string; + }; + tools?: ToolModuleResponseItemType[]; +}; +export type AIChatItemType = { + obj: ChatRoleEnum.AI; + value: AIChatItemValueItemType[]; + userGoodFeedback?: string; + userBadFeedback?: string; + customFeedbacks?: string[]; + adminFeedback?: AdminFbkType; + [DispatchNodeResponseKeyEnum.nodeResponse]?: ChatHistoryItemResType[]; +}; +export type ChatItemValueItemType = + | UserChatItemValueItemType + | SystemChatItemValueItemType + | AIChatItemValueItemType; + +export type ChatItemSchema = (UserChatItemType | SystemChatItemType | AIChatItemType) & { dataId: string; chatId: string; userId: string; @@ -38,13 +93,6 @@ export type ChatItemSchema = { tmbId: string; appId: string; time: Date; - obj: 
`${ChatRoleEnum}`; - value: string; - userGoodFeedback?: string; - userBadFeedback?: string; - customFeedbacks?: string[]; - adminFeedback?: AdminFbkType; - [ModuleOutputKeyEnum.responseData]?: ChatHistoryItemResType[]; }; export type AdminFbkType = { @@ -56,22 +104,16 @@ export type AdminFbkType = { }; /* --------- chat item ---------- */ -export type ChatItemType = { +export type ChatItemType = (UserChatItemType | SystemChatItemType | AIChatItemType) & { dataId?: string; - obj: ChatItemSchema['obj']; - value: any; - userGoodFeedback?: string; - userBadFeedback?: string; - customFeedbacks?: ChatItemSchema['customFeedbacks']; - adminFeedback?: ChatItemSchema['feedback']; - [ModuleOutputKeyEnum.responseData]?: ChatHistoryItemResType[]; }; -export type ChatSiteItemType = ChatItemType & { +export type ChatSiteItemType = (UserChatItemType | SystemChatItemType | AIChatItemType) & { + dataId?: string; status: `${ChatStatusEnum}`; moduleName?: string; ttsBuffer?: Uint8Array; -}; +} & ChatBoxInputType; /* --------- team chat --------- */ export type ChatAppListSchema = { @@ -93,60 +135,25 @@ export type ChatHistoryItemType = HistoryItemType & { }; /* ------- response data ------------ */ -export type moduleDispatchResType = { - // common - moduleLogo?: string; - runningTime?: number; - query?: string; - textOutput?: string; - - // bill - tokens?: number; - model?: string; - contextTotalLen?: number; - totalPoints?: number; - - // chat - temperature?: number; - maxToken?: number; - quoteList?: SearchDataResponseItemType[]; - historyPreview?: ChatItemType[]; // completion context array. 
history will slice - - // dataset search - similarity?: number; - limit?: number; - searchMode?: `${DatasetSearchModeEnum}`; - searchUsingReRank?: boolean; - extensionModel?: string; - extensionResult?: string; - extensionTokens?: number; - - // cq - cqList?: ClassifyQuestionAgentItemType[]; - cqResult?: string; - - // content extract - extractDescription?: string; - extractResult?: Record; - - // http - params?: Record; - body?: Record; - headers?: Record; - httpResult?: Record; - - // plugin output - pluginOutput?: Record; - pluginDetail?: ChatHistoryItemResType[]; - - // tf switch - tfSwitchResult?: boolean; - - // abandon - tokens?: number; -}; - -export type ChatHistoryItemResType = moduleDispatchResType & { +export type ChatHistoryItemResType = DispatchNodeResponseType & { moduleType: `${FlowNodeTypeEnum}`; moduleName: string; }; + +/* One tool run response */ +export type ToolRunResponseItemType = Record | Array; +/* tool module response */ +export type ToolModuleResponseItemType = { + id: string; + toolName: string; // tool name + toolAvatar: string; + params: string; // tool params + response: string; + functionName: string; +}; + +/* dispatch run time */ +export type RuntimeUserPromptType = { + files?: UserChatItemValueItemType['file'][]; + text: string; +}; diff --git a/packages/global/core/chat/utils.ts b/packages/global/core/chat/utils.ts index 19b6759a9..8dd37bb90 100644 --- a/packages/global/core/chat/utils.ts +++ b/packages/global/core/chat/utils.ts @@ -1,6 +1,79 @@ -import { IMG_BLOCK_KEY, FILE_BLOCK_KEY } from './constants'; +import { DispatchNodeResponseType } from '../module/runtime/type'; +import { FlowNodeInputTypeEnum, FlowNodeTypeEnum } from '../module/node/constant'; +import { ChatItemValueTypeEnum, ChatRoleEnum } from './constants'; +import { ChatHistoryItemResType, ChatItemType } from './type.d'; -export function chatContentReplaceBlock(content: string = '') { - const regex = new RegExp(`\`\`\`(${IMG_BLOCK_KEY})\\n([\\s\\S]*?)\`\`\``, 
'g'); - return content.replace(regex, '').trim(); -} +export const getChatTitleFromChatMessage = (message?: ChatItemType, defaultValue = '新对话') => { + // @ts-ignore + const textMsg = message?.value.find((item) => item.type === ChatItemValueTypeEnum.text); + + if (textMsg?.text?.content) { + return textMsg.text.content.slice(0, 20); + } + + return defaultValue; +}; + +export const getHistoryPreview = ( + completeMessages: ChatItemType[] +): { + obj: `${ChatRoleEnum}`; + value: string; +}[] => { + return completeMessages.map((item, i) => { + if (item.obj === ChatRoleEnum.System || i >= completeMessages.length - 2) { + return { + obj: item.obj, + value: item.value?.[0]?.text?.content || '' + }; + } + + const content = item.value + .map((item) => { + if (item.text?.content) { + const content = + item.text.content.length > 20 + ? `${item.text.content.slice(0, 20)}...` + : item.text.content; + return content; + } + return ''; + }) + .filter(Boolean) + .join('\n'); + + return { + obj: item.obj, + value: content + }; + }); +}; + +export const filterPublicNodeResponseData = ({ + flowResponses = [] +}: { + flowResponses?: ChatHistoryItemResType[]; +}) => { + const filedList = ['quoteList', 'moduleType']; + const filterModuleTypeList: any[] = [ + FlowNodeTypeEnum.pluginModule, + FlowNodeTypeEnum.datasetSearchNode, + FlowNodeTypeEnum.tools + ]; + + return flowResponses + .filter((item) => filterModuleTypeList.includes(item.moduleType)) + .map((item) => { + const obj: DispatchNodeResponseType = {}; + for (let key in item) { + if (key === 'toolDetail' || key === 'pluginDetail') { + // @ts-ignore + obj[key] = filterPublicNodeResponseData({ flowResponses: item[key] }); + } else if (filedList.includes(key)) { + // @ts-ignore + obj[key] = item[key]; + } + } + return obj as ChatHistoryItemResType; + }); +}; diff --git a/packages/global/core/dataset/constants.ts b/packages/global/core/dataset/constants.ts index 87f569c1c..c744c0fc8 100644 --- 
a/packages/global/core/dataset/constants.ts +++ b/packages/global/core/dataset/constants.ts @@ -83,17 +83,17 @@ export const TrainingTypeMap = { [TrainingModeEnum.chunk]: { label: 'core.dataset.training.Chunk mode', tooltip: 'core.dataset.import.Chunk Split Tip', - isPlus: true + openSource: true }, [TrainingModeEnum.auto]: { label: 'core.dataset.training.Auto mode', tooltip: 'core.dataset.training.Auto mode Tip', - isPlus: true + openSource: false }, [TrainingModeEnum.qa]: { label: 'core.dataset.training.QA mode', tooltip: 'core.dataset.import.QA Import Tip', - isPlus: true + openSource: true } }; diff --git a/packages/global/core/module/constants.ts b/packages/global/core/module/constants.ts index 16098f51f..2bf5c9ed1 100644 --- a/packages/global/core/module/constants.ts +++ b/packages/global/core/module/constants.ts @@ -21,7 +21,10 @@ export enum ModuleIOValueTypeEnum { // plugin special type selectApp = 'selectApp', - selectDataset = 'selectDataset' + selectDataset = 'selectDataset', + + // tool + tools = 'tools' } /* reg: modulename key */ @@ -89,12 +92,10 @@ export enum ModuleInputKeyEnum { export enum ModuleOutputKeyEnum { // common - responseData = 'responseData', - moduleDispatchBills = 'moduleDispatchBills', userChatInput = 'userChatInput', finish = 'finish', history = 'history', - answerText = 'answerText', // answer module text key + answerText = 'answerText', // module answer. 
the value will be show and save to history success = 'success', failed = 'failed', text = 'system_text', @@ -110,7 +111,13 @@ export enum ModuleOutputKeyEnum { // tf switch resultTrue = 'system_resultTrue', - resultFalse = 'system_resultFalse' + resultFalse = 'system_resultFalse', + + // tools + selectedTools = 'selectedTools', + + // http + httpRawResponse = 'httpRawResponse' } export enum VariableInputEnum { diff --git a/packages/global/core/module/node/constant.ts b/packages/global/core/module/node/constant.ts index 4b285437f..9f8760979 100644 --- a/packages/global/core/module/node/constant.ts +++ b/packages/global/core/module/node/constant.ts @@ -56,7 +56,8 @@ export enum FlowNodeTypeEnum { pluginModule = 'pluginModule', pluginInput = 'pluginInput', pluginOutput = 'pluginOutput', - queryExtension = 'cfr' + queryExtension = 'cfr', + tools = 'tools' // abandon } diff --git a/packages/global/core/module/node/type.d.ts b/packages/global/core/module/node/type.d.ts index ecfd02f9c..a643e0b50 100644 --- a/packages/global/core/module/node/type.d.ts +++ b/packages/global/core/module/node/type.d.ts @@ -2,6 +2,7 @@ import { FlowNodeInputTypeEnum, FlowNodeOutputTypeEnum, FlowNodeTypeEnum } from import { ModuleIOValueTypeEnum, ModuleInputKeyEnum, ModuleOutputKeyEnum } from '../constants'; import { SelectedDatasetType } from '../api'; import { EditInputFieldMap, EditOutputFieldMap } from './type'; +import { LLMModelTypeEnum } from '../../ai/constants'; export type FlowNodeChangeProps = { moduleId: string; @@ -28,6 +29,7 @@ export type FlowNodeInputItemType = { label: string; description?: string; required?: boolean; + toolDescription?: string; // If this field is not empty, it is entered as a tool edit?: boolean; // Whether to allow editing editField?: EditInputFieldMap; @@ -49,6 +51,8 @@ export type FlowNodeInputItemType = { step?: number; // slider max?: number; // slider, number input min?: number; // slider, number input + + llmModelType?: `${LLMModelTypeEnum}`; }; 
export type FlowNodeOutputTargetItemType = { @@ -62,6 +66,8 @@ export type FlowNodeOutputItemType = { label?: string; description?: string; + required?: boolean; + defaultValue?: any; edit?: boolean; editField?: EditOutputFieldMap; @@ -74,12 +80,14 @@ export type FlowNodeOutputItemType = { export type EditInputFieldMap = EditOutputFieldMap & { inputType?: boolean; required?: boolean; + isToolInput?: boolean; }; export type EditOutputFieldMap = { name?: boolean; key?: boolean; description?: boolean; dataType?: boolean; + defaultValue?: boolean; }; export type EditNodeFieldType = { inputType?: `${FlowNodeInputTypeEnum}`; // input type @@ -89,6 +97,8 @@ export type EditNodeFieldType = { label?: string; description?: string; valueType?: `${ModuleIOValueTypeEnum}`; + isToolInput?: boolean; + defaultValue?: string; }; /* ------------- item type --------------- */ diff --git a/packages/global/core/module/runtime/constants.ts b/packages/global/core/module/runtime/constants.ts new file mode 100644 index 000000000..9a28162f2 --- /dev/null +++ b/packages/global/core/module/runtime/constants.ts @@ -0,0 +1,19 @@ +export enum SseResponseEventEnum { + error = 'error', + answer = 'answer', // animation stream + fastAnswer = 'fastAnswer', // direct answer text, not animation + flowNodeStatus = 'flowNodeStatus', // update node status + + toolCall = 'toolCall', // tool start + toolParams = 'toolParams', // tool params return + toolResponse = 'toolResponse', // tool response return + flowResponses = 'flowResponses' // sse response request +} + +export enum DispatchNodeResponseKeyEnum { + nodeResponse = 'responseData', // run node response + nodeDispatchUsages = 'nodeDispatchUsages', // the node bill. 
+ childrenResponses = 'childrenResponses', // Some nodes make recursive calls that need to be returned + toolResponses = 'toolResponses', // The result is passed back to the tool node for use + assistantResponses = 'assistantResponses' // assistant response +} diff --git a/packages/global/core/module/runtime/type.d.ts b/packages/global/core/module/runtime/type.d.ts new file mode 100644 index 000000000..b01454672 --- /dev/null +++ b/packages/global/core/module/runtime/type.d.ts @@ -0,0 +1,101 @@ +import { ChatNodeUsageType } from '../../../support/wallet/bill/type'; +import { ChatItemValueItemType, ToolRunResponseItemType } from '../../chat/type'; +import { FlowNodeInputItemType, FlowNodeOutputItemType } from '../node/type'; +import { ModuleItemType } from '../type'; +import { DispatchNodeResponseKeyEnum } from './constants'; + +export type RunningModuleItemType = { + name: ModuleItemType['name']; + avatar: ModuleItemType['avatar']; + intro?: ModuleItemType['intro']; + moduleId: ModuleItemType['moduleId']; + flowType: ModuleItemType['flowType']; + showStatus?: ModuleItemType['showStatus']; + isEntry?: ModuleItemType['isEntry']; + + inputs: { + key: string; + value?: any; + valueType?: FlowNodeInputItemType['valueType']; + required?: boolean; + toolDescription?: string; + }[]; + outputs: { + key: string; + required?: boolean; + defaultValue?: any; + answer?: boolean; + response?: boolean; + value?: any; + valueType?: FlowNodeOutputItemType['valueType']; + targets: { + moduleId: string; + key: string; + }[]; + }[]; +}; + +export type DispatchNodeResponseType = { + // common + moduleLogo?: string; + runningTime?: number; + query?: string; + textOutput?: string; + + // bill + tokens?: number; + model?: string; + contextTotalLen?: number; + totalPoints?: number; + + // chat + temperature?: number; + maxToken?: number; + quoteList?: SearchDataResponseItemType[]; + historyPreview?: { + obj: `${ChatRoleEnum}`; + value: string; + }[]; // completion context array. 
history will slice + + // dataset search + similarity?: number; + limit?: number; + searchMode?: `${DatasetSearchModeEnum}`; + searchUsingReRank?: boolean; + extensionModel?: string; + extensionResult?: string; + extensionTokens?: number; + + // cq + cqList?: ClassifyQuestionAgentItemType[]; + cqResult?: string; + + // content extract + extractDescription?: string; + extractResult?: Record; + + // http + params?: Record; + body?: Record; + headers?: Record; + httpResult?: Record; + + // plugin output + pluginOutput?: Record; + pluginDetail?: ChatHistoryItemResType[]; + + // tf switch + tfSwitchResult?: boolean; + + // tool + toolCallTokens?: number; + toolDetail?: ChatHistoryItemResType[]; +}; + +export type DispatchNodeResultType = { + [DispatchNodeResponseKeyEnum.nodeResponse]?: DispatchNodeResponseType; // The node response detail + [DispatchNodeResponseKeyEnum.nodeDispatchUsages]?: ChatNodeUsageType[]; // + [DispatchNodeResponseKeyEnum.childrenResponses]?: DispatchNodeResultType[]; + [DispatchNodeResponseKeyEnum.toolResponses]?: ToolRunResponseItemType; + [DispatchNodeResponseKeyEnum.assistantResponses]?: ChatItemValueItemType[]; +} & T; diff --git a/packages/global/core/module/runtime/utils.ts b/packages/global/core/module/runtime/utils.ts new file mode 100644 index 000000000..76afc526d --- /dev/null +++ b/packages/global/core/module/runtime/utils.ts @@ -0,0 +1,31 @@ +import { ChatCompletionRequestMessageRoleEnum } from '../../ai/constants'; + +export const textAdaptGptResponse = ({ + text, + model = '', + finish_reason = null, + extraData = {} +}: { + model?: string; + text: string | null; + finish_reason?: null | 'stop'; + extraData?: Object; +}) => { + return JSON.stringify({ + ...extraData, + id: '', + object: '', + created: 0, + model, + choices: [ + { + delta: + text === null + ? 
{} + : { role: ChatCompletionRequestMessageRoleEnum.Assistant, content: text }, + index: 0, + finish_reason + } + ] + }); +}; diff --git a/projects/app/src/web/core/modules/template/system.ts b/packages/global/core/module/template/constants.ts similarity index 55% rename from projects/app/src/web/core/modules/template/system.ts rename to packages/global/core/module/template/constants.ts index 7370a4e0f..2e4c673a0 100644 --- a/projects/app/src/web/core/modules/template/system.ts +++ b/packages/global/core/module/template/constants.ts @@ -1,26 +1,25 @@ -import { UserGuideModule } from '@fastgpt/global/core/module/template/system/userGuide'; -import { UserInputModule } from '@fastgpt/global/core/module/template/system/userInput'; -import { AiChatModule } from '@fastgpt/global/core/module/template/system/aiChat'; -import { DatasetSearchModule } from '@fastgpt/global/core/module/template/system/datasetSearch'; -import { DatasetConcatModule } from '@fastgpt/global/core/module/template/system/datasetConcat'; -import { AssignedAnswerModule } from '@fastgpt/global/core/module/template/system/assignedAnswer'; -import { ClassifyQuestionModule } from '@fastgpt/global/core/module/template/system/classifyQuestion'; -import { ContextExtractModule } from '@fastgpt/global/core/module/template/system/contextExtract'; -import { HttpModule468 } from '@fastgpt/global/core/module/template/system/http468'; -import { HttpModule } from '@fastgpt/global/core/module/template/system/abandon/http'; +import { UserGuideModule } from './system/userGuide'; +import { UserInputModule } from './system/userInput'; +import { AiChatModule } from './system/aiChat'; +import { DatasetSearchModule } from './system/datasetSearch'; +import { DatasetConcatModule } from './system/datasetConcat'; +import { AssignedAnswerModule } from './system/assignedAnswer'; +import { ClassifyQuestionModule } from './system/classifyQuestion'; +import { ContextExtractModule } from './system/contextExtract'; +import { 
HttpModule468 } from './system/http468'; +import { HttpModule } from './system/abandon/http'; +import { ToolModule } from './system/tools'; -import { RunAppModule } from '@fastgpt/global/core/module/template/system/runApp'; -import { PluginInputModule } from '@fastgpt/global/core/module/template/system/pluginInput'; -import { PluginOutputModule } from '@fastgpt/global/core/module/template/system/pluginOutput'; -import { RunPluginModule } from '@fastgpt/global/core/module/template/system/runPlugin'; -import { AiQueryExtension } from '@fastgpt/global/core/module/template/system/queryExtension'; +import { RunAppModule } from './system/runApp'; +import { PluginInputModule } from './system/pluginInput'; +import { PluginOutputModule } from './system/pluginOutput'; +import { RunPluginModule } from './system/runPlugin'; +import { AiQueryExtension } from './system/queryExtension'; -import type { - FlowModuleTemplateType, - moduleTemplateListType -} from '@fastgpt/global/core/module/type.d'; -import { ModuleTemplateTypeEnum } from '@fastgpt/global/core/module/constants'; +import type { FlowModuleTemplateType, moduleTemplateListType } from '../../module/type.d'; +import { ModuleTemplateTypeEnum } from '../../module/constants'; +/* app flow module templates */ export const appSystemModuleTemplates: FlowModuleTemplateType[] = [ UserGuideModule, UserInputModule, @@ -29,11 +28,13 @@ export const appSystemModuleTemplates: FlowModuleTemplateType[] = [ DatasetSearchModule, DatasetConcatModule, RunAppModule, + ToolModule, ClassifyQuestionModule, ContextExtractModule, HttpModule468, AiQueryExtension ]; +/* plugin flow module templates */ export const pluginSystemModuleTemplates: FlowModuleTemplateType[] = [ PluginInputModule, PluginOutputModule, @@ -42,12 +43,14 @@ export const pluginSystemModuleTemplates: FlowModuleTemplateType[] = [ DatasetSearchModule, DatasetConcatModule, RunAppModule, + ToolModule, ClassifyQuestionModule, ContextExtractModule, HttpModule468, AiQueryExtension ]; 
+/* all module */ export const moduleTemplatesFlat: FlowModuleTemplateType[] = [ UserGuideModule, UserInputModule, @@ -59,6 +62,8 @@ export const moduleTemplatesFlat: FlowModuleTemplateType[] = [ ContextExtractModule, HttpModule468, HttpModule, + ToolModule, + AiChatModule, RunAppModule, PluginInputModule, PluginOutputModule, diff --git a/packages/global/core/module/template/input.ts b/packages/global/core/module/template/input.ts index ad062cf38..32e278798 100644 --- a/packages/global/core/module/template/input.ts +++ b/packages/global/core/module/template/input.ts @@ -2,6 +2,7 @@ import type { FlowNodeInputItemType } from '../node/type.d'; import { DYNAMIC_INPUT_KEY, ModuleInputKeyEnum } from '../constants'; import { FlowNodeInputTypeEnum } from '../node/constant'; import { ModuleIOValueTypeEnum } from '../constants'; +import { chatNodeSystemPromptTip } from './tip'; export const Input_Template_Switch: FlowNodeInputItemType = { key: ModuleInputKeyEnum.switch, @@ -58,6 +59,28 @@ export const Input_Template_DynamicInput: FlowNodeInputItemType = { hideInApp: true }; +export const Input_Template_AiModel: FlowNodeInputItemType = { + key: ModuleInputKeyEnum.aiModel, + type: FlowNodeInputTypeEnum.selectLLMModel, + label: 'core.module.input.label.aiModel', + required: true, + valueType: ModuleIOValueTypeEnum.string, + showTargetInApp: false, + showTargetInPlugin: false +}; + +export const Input_Template_System_Prompt: FlowNodeInputItemType = { + key: ModuleInputKeyEnum.aiSystemPrompt, + type: FlowNodeInputTypeEnum.textarea, + max: 3000, + valueType: ModuleIOValueTypeEnum.string, + label: 'core.ai.Prompt', + description: chatNodeSystemPromptTip, + placeholder: chatNodeSystemPromptTip, + showTargetInApp: true, + showTargetInPlugin: true +}; + export const Input_Template_Dataset_Quote: FlowNodeInputItemType = { key: ModuleInputKeyEnum.aiChatDatasetQuote, type: FlowNodeInputTypeEnum.target, diff --git a/packages/global/core/module/template/system/aiChat.ts 
b/packages/global/core/module/template/system/aiChat.ts index f11ec5c09..d652a89f7 100644 --- a/packages/global/core/module/template/system/aiChat.ts +++ b/packages/global/core/module/template/system/aiChat.ts @@ -11,9 +11,11 @@ import { ModuleTemplateTypeEnum } from '../../constants'; import { + Input_Template_AiModel, Input_Template_Dataset_Quote, Input_Template_History, Input_Template_Switch, + Input_Template_System_Prompt, Input_Template_UserChatInput } from '../input'; import { chatNodeSystemPromptTip } from '../tip'; @@ -24,20 +26,13 @@ export const AiChatModule: FlowModuleTemplateType = { templateType: ModuleTemplateTypeEnum.textAnswer, flowType: FlowNodeTypeEnum.chatNode, avatar: '/imgs/module/AI.png', - name: 'core.module.template.Ai chat', - intro: 'core.module.template.Ai chat intro', + name: 'AI 对话', + intro: 'AI 大模型对话', showStatus: true, + // isTool: true, inputs: [ Input_Template_Switch, - { - key: ModuleInputKeyEnum.aiModel, - type: FlowNodeInputTypeEnum.selectLLMModel, - label: 'core.module.input.label.aiModel', - required: true, - valueType: ModuleIOValueTypeEnum.string, - showTargetInApp: false, - showTargetInPlugin: false - }, + Input_Template_AiModel, // --- settings modal { key: ModuleInputKeyEnum.aiChatTemperature, @@ -98,18 +93,13 @@ export const AiChatModule: FlowModuleTemplateType = { }, // settings modal --- { - key: ModuleInputKeyEnum.aiSystemPrompt, - type: FlowNodeInputTypeEnum.textarea, + ...Input_Template_System_Prompt, label: 'core.ai.Prompt', - max: 300, - valueType: ModuleIOValueTypeEnum.string, description: chatNodeSystemPromptTip, - placeholder: chatNodeSystemPromptTip, - showTargetInApp: true, - showTargetInPlugin: true + placeholder: chatNodeSystemPromptTip }, Input_Template_History, - Input_Template_UserChatInput, + { ...Input_Template_UserChatInput, toolDescription: '用户问题' }, Input_Template_Dataset_Quote ], outputs: [ diff --git a/packages/global/core/module/template/system/assignedAnswer.ts 
b/packages/global/core/module/template/system/assignedAnswer.ts index fb2a3d431..a902a70dc 100644 --- a/packages/global/core/module/template/system/assignedAnswer.ts +++ b/packages/global/core/module/template/system/assignedAnswer.ts @@ -9,8 +9,9 @@ export const AssignedAnswerModule: FlowModuleTemplateType = { templateType: ModuleTemplateTypeEnum.textAnswer, flowType: FlowNodeTypeEnum.answerNode, avatar: '/imgs/module/reply.png', - name: 'core.module.template.Assigned reply', - intro: 'core.module.template.Assigned reply intro', + name: '指定回复', + intro: + '该模块可以直接回复一段指定的内容。常用于引导、提示。非字符串内容传入时,会转成字符串进行输出。', inputs: [ Input_Template_Switch, { diff --git a/packages/global/core/module/template/system/classifyQuestion.ts b/packages/global/core/module/template/system/classifyQuestion.ts index fff8826d0..af0d26a35 100644 --- a/packages/global/core/module/template/system/classifyQuestion.ts +++ b/packages/global/core/module/template/system/classifyQuestion.ts @@ -6,40 +6,34 @@ import { import { FlowModuleTemplateType } from '../../type.d'; import { ModuleIOValueTypeEnum, ModuleInputKeyEnum, ModuleTemplateTypeEnum } from '../../constants'; import { + Input_Template_AiModel, Input_Template_History, Input_Template_Switch, Input_Template_UserChatInput } from '../input'; import { Output_Template_UserChatInput } from '../output'; +import { Input_Template_System_Prompt } from '../input'; +import { LLMModelTypeEnum } from '../../../ai/constants'; export const ClassifyQuestionModule: FlowModuleTemplateType = { id: FlowNodeTypeEnum.classifyQuestion, templateType: ModuleTemplateTypeEnum.functionCall, flowType: FlowNodeTypeEnum.classifyQuestion, avatar: '/imgs/module/cq.png', - name: 'core.module.template.Classify question', - intro: `core.module.template.Classify question intro`, + name: '问题分类', + intro: `根据用户的历史记录和当前问题判断该次提问的类型。可以添加多组问题类型,下面是一个模板例子:\n类型1: 打招呼\n类型2: 关于商品“使用”问题\n类型3: 关于商品“购买”问题\n类型4: 其他问题`, showStatus: true, inputs: [ Input_Template_Switch, { - key: 
ModuleInputKeyEnum.aiModel, - type: FlowNodeInputTypeEnum.selectLLMModel, - valueType: ModuleIOValueTypeEnum.string, - label: 'core.module.input.label.Classify model', - required: true, - showTargetInApp: false, - showTargetInPlugin: false + ...Input_Template_AiModel, + llmModelType: LLMModelTypeEnum.classify }, { - key: ModuleInputKeyEnum.aiSystemPrompt, - type: FlowNodeInputTypeEnum.textarea, - valueType: ModuleIOValueTypeEnum.string, + ...Input_Template_System_Prompt, label: 'core.module.input.label.Background', description: 'core.module.input.description.Background', - placeholder: 'core.module.input.placeholder.Classify background', - showTargetInApp: true, - showTargetInPlugin: true + placeholder: 'core.module.input.placeholder.Classify background' }, Input_Template_History, Input_Template_UserChatInput, diff --git a/packages/global/core/module/template/system/contextExtract.ts b/packages/global/core/module/template/system/contextExtract.ts index 86a1bb7bc..003e2fa8b 100644 --- a/packages/global/core/module/template/system/contextExtract.ts +++ b/packages/global/core/module/template/system/contextExtract.ts @@ -10,26 +10,23 @@ import { ModuleOutputKeyEnum, ModuleTemplateTypeEnum } from '../../constants'; -import { Input_Template_History, Input_Template_Switch } from '../input'; +import { Input_Template_AiModel, Input_Template_History, Input_Template_Switch } from '../input'; +import { LLMModelTypeEnum } from '../../../ai/constants'; export const ContextExtractModule: FlowModuleTemplateType = { id: FlowNodeTypeEnum.contentExtract, templateType: ModuleTemplateTypeEnum.functionCall, flowType: FlowNodeTypeEnum.contentExtract, avatar: '/imgs/module/extract.png', - name: 'core.module.template.Extract field', - intro: 'core.module.template.Extract field intro', + name: '文本内容提取', + intro: '可从文本中提取指定的数据,例如:sql语句、搜索关键词、代码等', showStatus: true, + isTool: true, inputs: [ Input_Template_Switch, { - key: ModuleInputKeyEnum.aiModel, - type: 
FlowNodeInputTypeEnum.selectLLMModel, - valueType: ModuleIOValueTypeEnum.string, - label: 'core.module.input.label.LLM', - required: true, - showTargetInApp: false, - showTargetInPlugin: false + ...Input_Template_AiModel, + llmModelType: LLMModelTypeEnum.extractFields }, { key: ModuleInputKeyEnum.description, @@ -52,7 +49,8 @@ export const ContextExtractModule: FlowModuleTemplateType = { required: true, valueType: ModuleIOValueTypeEnum.string, showTargetInApp: true, - showTargetInPlugin: true + showTargetInPlugin: true, + toolDescription: '需要检索的内容' }, { key: ModuleInputKeyEnum.extractKeys, diff --git a/packages/global/core/module/template/system/datasetConcat.ts b/packages/global/core/module/template/system/datasetConcat.ts index 253b383c6..0d64bc6e9 100644 --- a/packages/global/core/module/template/system/datasetConcat.ts +++ b/packages/global/core/module/template/system/datasetConcat.ts @@ -26,7 +26,7 @@ export const DatasetConcatModule: FlowModuleTemplateType = { templateType: ModuleTemplateTypeEnum.tools, avatar: '/imgs/module/concat.svg', name: '知识库搜索引用合并', - intro: 'core.module.template.Dataset search result concat intro', + intro: '可以将多个知识库搜索结果进行合并输出。使用 RRF 的合并方式进行最终排序输出。', showStatus: false, inputs: [ Input_Template_Switch, diff --git a/packages/global/core/module/template/system/datasetSearch.ts b/packages/global/core/module/template/system/datasetSearch.ts index a90964c99..d61306d58 100644 --- a/packages/global/core/module/template/system/datasetSearch.ts +++ b/packages/global/core/module/template/system/datasetSearch.ts @@ -19,9 +19,10 @@ export const DatasetSearchModule: FlowModuleTemplateType = { templateType: ModuleTemplateTypeEnum.functionCall, flowType: FlowNodeTypeEnum.datasetSearchNode, avatar: '/imgs/module/db.png', - name: 'core.module.template.Dataset search', - intro: 'core.module.template.Dataset search intro', + name: '知识库搜索', + intro: '调用知识库搜索能力,查找“有可能”与问题相关的内容', showStatus: true, + isTool: true, inputs: [ Input_Template_Switch, { @@ -97,7 
+98,10 @@ export const DatasetSearchModule: FlowModuleTemplateType = { showTargetInPlugin: false, value: '' }, - Input_Template_UserChatInput + { + ...Input_Template_UserChatInput, + toolDescription: '需要检索的内容' + } ], outputs: [ Output_Template_UserChatInput, diff --git a/packages/global/core/module/template/system/http468.ts b/packages/global/core/module/template/system/http468.ts index 8ef519436..d86e5e13f 100644 --- a/packages/global/core/module/template/system/http468.ts +++ b/packages/global/core/module/template/system/http468.ts @@ -5,9 +5,9 @@ import { } from '../../node/constant'; import { FlowModuleTemplateType } from '../../type'; import { - DYNAMIC_INPUT_KEY, ModuleIOValueTypeEnum, ModuleInputKeyEnum, + ModuleOutputKeyEnum, ModuleTemplateTypeEnum } from '../../constants'; import { @@ -22,9 +22,10 @@ export const HttpModule468: FlowModuleTemplateType = { templateType: ModuleTemplateTypeEnum.externalCall, flowType: FlowNodeTypeEnum.httpRequest468, avatar: '/imgs/module/http.png', - name: 'core.module.template.Http request', - intro: 'core.module.template.Http request intro', + name: 'HTTP 请求', + intro: '可以发出一个 HTTP 请求,实现更为复杂的操作(联网搜索、数据库查询等)', showStatus: true, + isTool: true, inputs: [ Input_Template_Switch, { @@ -86,7 +87,6 @@ export const HttpModule468: FlowModuleTemplateType = { editField: { key: true, description: true, - required: true, dataType: true }, defaultEditField: { @@ -94,19 +94,27 @@ export const HttpModule468: FlowModuleTemplateType = { key: '', description: '', inputType: FlowNodeInputTypeEnum.target, - valueType: ModuleIOValueTypeEnum.string, - required: true + valueType: ModuleIOValueTypeEnum.string } } ], outputs: [ Output_Template_Finish, + { + key: ModuleOutputKeyEnum.httpRawResponse, + label: '原始响应', + description: 'HTTP请求的原始响应。只能接受字符串或JSON类型响应数据。', + valueType: ModuleIOValueTypeEnum.any, + type: FlowNodeOutputTypeEnum.source, + targets: [] + }, { ...Output_Template_AddOutput, editField: { key: true, description: true, - dataType: 
true + dataType: true, + defaultValue: true }, defaultEditField: { label: '', diff --git a/packages/global/core/module/template/system/queryExtension.ts b/packages/global/core/module/template/system/queryExtension.ts index dc598190f..e33e3ecb2 100644 --- a/packages/global/core/module/template/system/queryExtension.ts +++ b/packages/global/core/module/template/system/queryExtension.ts @@ -13,28 +13,26 @@ import { import { Input_Template_History, Input_Template_Switch, - Input_Template_UserChatInput + Input_Template_UserChatInput, + Input_Template_AiModel } from '../input'; import { Output_Template_UserChatInput } from '../output'; +import { LLMModelTypeEnum } from '../../../ai/constants'; export const AiQueryExtension: FlowModuleTemplateType = { id: FlowNodeTypeEnum.chatNode, templateType: ModuleTemplateTypeEnum.other, flowType: FlowNodeTypeEnum.queryExtension, avatar: '/imgs/module/cfr.svg', - name: 'core.module.template.Query extension', - intro: 'core.module.template.Query extension intro', + name: '问题优化', + intro: + '使用问题优化功能,可以提高知识库连续对话时搜索的精度。使用该功能后,会先利用 AI 根据上下文构建一个或多个新的检索词,这些检索词更利于进行知识库搜索。该模块已内置在知识库搜索模块中,如果您仅进行一次知识库搜索,可直接使用知识库内置的补全功能。', showStatus: true, inputs: [ Input_Template_Switch, { - key: ModuleInputKeyEnum.aiModel, - type: FlowNodeInputTypeEnum.selectLLMModel, - label: 'core.module.input.label.aiModel', - required: true, - valueType: ModuleIOValueTypeEnum.string, - showTargetInApp: false, - showTargetInPlugin: false + ...Input_Template_AiModel, + llmModelType: LLMModelTypeEnum.queryExtension }, { key: ModuleInputKeyEnum.aiSystemPrompt, diff --git a/packages/global/core/module/template/system/runApp.ts b/packages/global/core/module/template/system/runApp.ts index ef4cdcecc..3c6976e4f 100644 --- a/packages/global/core/module/template/system/runApp.ts +++ b/packages/global/core/module/template/system/runApp.ts @@ -22,8 +22,8 @@ export const RunAppModule: FlowModuleTemplateType = { templateType: ModuleTemplateTypeEnum.externalCall, flowType: 
FlowNodeTypeEnum.runApp, avatar: '/imgs/module/app.png', - name: 'core.module.template.Running app', - intro: 'core.module.template.Running app intro', + name: '应用调用', + intro: '可以选择一个其他应用进行调用', showStatus: true, inputs: [ Input_Template_Switch, @@ -52,7 +52,7 @@ export const RunAppModule: FlowModuleTemplateType = { }, { key: ModuleOutputKeyEnum.answerText, - label: 'AI回复', + label: '回复的文本', description: '将在应用完全结束后触发', valueType: ModuleIOValueTypeEnum.string, type: FlowNodeOutputTypeEnum.source, diff --git a/packages/global/core/module/template/system/runPlugin.ts b/packages/global/core/module/template/system/runPlugin.ts index 45cfb039a..988901675 100644 --- a/packages/global/core/module/template/system/runPlugin.ts +++ b/packages/global/core/module/template/system/runPlugin.ts @@ -9,6 +9,7 @@ export const RunPluginModule: FlowModuleTemplateType = { intro: '', name: '', showStatus: false, + isTool: true, inputs: [], // [{key:'pluginId'},...] outputs: [] }; diff --git a/packages/global/core/module/template/system/tools.ts b/packages/global/core/module/template/system/tools.ts new file mode 100644 index 000000000..73175b7b4 --- /dev/null +++ b/packages/global/core/module/template/system/tools.ts @@ -0,0 +1,52 @@ +import { FlowNodeOutputTypeEnum, FlowNodeTypeEnum } from '../../node/constant'; +import { FlowModuleTemplateType } from '../../type.d'; +import { + ModuleIOValueTypeEnum, + ModuleOutputKeyEnum, + ModuleTemplateTypeEnum +} from '../../constants'; +import { + Input_Template_AiModel, + Input_Template_History, + Input_Template_Switch, + Input_Template_System_Prompt, + Input_Template_UserChatInput +} from '../input'; +import { chatNodeSystemPromptTip } from '../tip'; +import { Output_Template_Finish, Output_Template_UserChatInput } from '../output'; +import { LLMModelTypeEnum } from '../../../ai/constants'; + +export const ToolModule: FlowModuleTemplateType = { + id: FlowNodeTypeEnum.tools, + flowType: FlowNodeTypeEnum.tools, + templateType: 
ModuleTemplateTypeEnum.functionCall, + avatar: '/imgs/module/tool.svg', + name: '工具调用(实验)', + intro: '通过AI模型自动选择一个或多个工具进行调用。工具可以是其他功能块或插件。', + showStatus: true, + inputs: [ + Input_Template_Switch, + { + ...Input_Template_AiModel, + llmModelType: LLMModelTypeEnum.toolCall + }, + { + ...Input_Template_System_Prompt, + label: 'core.ai.Prompt', + description: chatNodeSystemPromptTip, + placeholder: chatNodeSystemPromptTip + }, + Input_Template_History, + Input_Template_UserChatInput + ], + outputs: [ + Output_Template_UserChatInput, + { + key: ModuleOutputKeyEnum.selectedTools, + valueType: ModuleIOValueTypeEnum.tools, + type: FlowNodeOutputTypeEnum.hidden, + targets: [] + }, + Output_Template_Finish + ] +}; diff --git a/packages/global/core/module/template/system/userGuide.ts b/packages/global/core/module/template/system/userGuide.ts index 95c63ff68..b0ab56517 100644 --- a/packages/global/core/module/template/system/userGuide.ts +++ b/packages/global/core/module/template/system/userGuide.ts @@ -8,7 +8,7 @@ export const UserGuideModule: FlowModuleTemplateType = { templateType: ModuleTemplateTypeEnum.userGuide, flowType: FlowNodeTypeEnum.userGuide, avatar: '/imgs/module/userGuide.png', - name: 'core.module.template.User guide', + name: '全局配置', intro: userGuideTip, inputs: [ { diff --git a/packages/global/core/module/template/system/userInput.ts b/packages/global/core/module/template/system/userInput.ts index 9bdd1879a..bab65955f 100644 --- a/packages/global/core/module/template/system/userInput.ts +++ b/packages/global/core/module/template/system/userInput.ts @@ -16,8 +16,8 @@ export const UserInputModule: FlowModuleTemplateType = { templateType: ModuleTemplateTypeEnum.systemInput, flowType: FlowNodeTypeEnum.questionInput, avatar: '/imgs/module/userChatInput.svg', - name: 'core.module.template.Chat entrance', - intro: 'core.module.template.Chat entrance intro', + name: '对话入口', + intro: '当用户发送一个内容后,流程将会从这个模块开始执行。', inputs: [ { key: ModuleInputKeyEnum.userChatInput, diff 
--git a/packages/global/core/module/type.d.ts b/packages/global/core/module/type.d.ts index 7ffc5bbfe..349938169 100644 --- a/packages/global/core/module/type.d.ts +++ b/packages/global/core/module/type.d.ts @@ -5,10 +5,16 @@ import { ModuleTemplateTypeEnum, VariableInputEnum } from './constants'; +import { DispatchNodeResponseKeyEnum } from './runtime/constants'; import { FlowNodeInputItemType, FlowNodeOutputItemType } from './node/type'; import { UserModelSchema } from 'support/user/type'; -import { moduleDispatchResType } from '..//chat/type'; -import { ChatModuleUsageType } from '../../support/wallet/bill/type'; +import { + ChatItemValueItemType, + ToolRunResponseItemType, + UserChatItemValueItemType +} from '../chat/type'; +import { ChatNodeUsageType } from '../../support/wallet/bill/type'; +import { RunningModuleItemType } from './runtime/type'; export type FlowModuleTemplateType = { id: string; // module id, unique @@ -17,6 +23,7 @@ export type FlowModuleTemplateType = { avatar?: string; name: string; intro: string; // template list intro + isTool?: boolean; // can be connected by tool showStatus?: boolean; // chatting response step status inputs: FlowNodeInputItemType[]; outputs: FlowNodeOutputItemType[]; @@ -44,6 +51,9 @@ export type ModuleItemType = { showStatus?: boolean; inputs: FlowNodeInputItemType[]; outputs: FlowNodeOutputItemType[]; + + // runTime field + isEntry?: boolean; }; /* --------------- function type -------------------- */ @@ -85,30 +95,6 @@ export type ContextExtractAgentItemType = { }; /* -------------- running module -------------- */ -export type RunningModuleItemType = { - name: ModuleItemType['name']; - moduleId: ModuleItemType['moduleId']; - flowType: ModuleItemType['flowType']; - showStatus?: ModuleItemType['showStatus']; -} & { - inputs: { - key: string; - value?: any; - valueType?: `${ModuleIOValueTypeEnum}`; - }[]; - outputs: { - key: string; - answer?: boolean; - response?: boolean; - value?: any; - valueType?: 
`${ModuleIOValueTypeEnum}`; - targets: { - moduleId: string; - key: string; - }[]; - }[]; -}; - export type ChatDispatchProps = { res: NextApiResponse; mode: 'test' | 'chat'; @@ -120,15 +106,13 @@ export type ChatDispatchProps = { responseChatItemId?: string; histories: ChatItemType[]; variables: Record; + inputFiles?: UserChatItemValueItemType['file'][]; stream: boolean; detail: boolean; // response detail }; export type ModuleDispatchProps = ChatDispatchProps & { module: RunningModuleItemType; + runtimeModules: RunningModuleItemType[]; params: T; }; -export type ModuleDispatchResponse = T & { - [ModuleOutputKeyEnum.responseData]?: moduleDispatchResType; - [ModuleOutputKeyEnum.moduleDispatchBills]?: ChatModuleUsageType[]; -}; diff --git a/packages/global/core/module/utils.ts b/packages/global/core/module/utils.ts index 880946f55..107abd714 100644 --- a/packages/global/core/module/utils.ts +++ b/packages/global/core/module/utils.ts @@ -10,6 +10,7 @@ import { AppTTSConfigType, ModuleItemType, VariableItemType } from './type'; import { Input_Template_Switch } from './template/input'; import { EditorVariablePickerType } from '../../../web/components/common/Textarea/PromptEditor/type'; +/* module */ export const getGuideModule = (modules: ModuleItemType[]) => modules.find((item) => item.flowType === FlowNodeTypeEnum.userGuide); @@ -57,13 +58,13 @@ export const getModuleInputUiField = (input: FlowNodeInputItemType) => { return {}; }; -export function plugin2ModuleIO( +export const plugin2ModuleIO = ( pluginId: string, modules: ModuleItemType[] ): { inputs: FlowNodeInputItemType[]; outputs: FlowNodeOutputItemType[]; -} { +} => { const pluginInput = modules.find((module) => module.flowType === FlowNodeTypeEnum.pluginInput); const pluginOutput = modules.find((module) => module.flowType === FlowNodeTypeEnum.pluginOutput); @@ -99,7 +100,7 @@ export function plugin2ModuleIO( })) : [] }; -} +}; export const formatEditorVariablePickerIcon = ( variables: { key: string; label: 
string; type?: `${VariableInputEnum}` }[] diff --git a/packages/global/package.json b/packages/global/package.json index cce0af6d6..bfc0b52f4 100644 --- a/packages/global/package.json +++ b/packages/global/package.json @@ -6,7 +6,7 @@ "dayjs": "^1.11.7", "encoding": "^0.1.13", "js-tiktoken": "^1.0.7", - "openai": "4.23.0", + "openai": "4.28.0", "nanoid": "^4.0.1", "timezones-list": "^3.0.2" }, diff --git a/packages/global/support/outLink/api.d.ts b/packages/global/support/outLink/api.d.ts index 20099288a..0088ae7d7 100644 --- a/packages/global/support/outLink/api.d.ts +++ b/packages/global/support/outLink/api.d.ts @@ -1,4 +1,4 @@ -import type { HistoryItemType, ChatSiteItemType } from '../../core/chat/type.d'; +import type { HistoryItemType } from '../../core/chat/type.d'; import { OutLinkSchema } from './type.d'; export type AuthOutLinkInitProps = { diff --git a/packages/global/support/wallet/bill/type.d.ts b/packages/global/support/wallet/bill/type.d.ts index 89a4afdaa..053263003 100644 --- a/packages/global/support/wallet/bill/type.d.ts +++ b/packages/global/support/wallet/bill/type.d.ts @@ -22,7 +22,7 @@ export type BillSchemaType = { username: string; }; -export type ChatModuleUsageType = { +export type ChatNodeUsageType = { tokens?: number; totalPoints: number; moduleName: string; diff --git a/packages/service/common/response/constant.ts b/packages/service/common/response/constant.ts deleted file mode 100644 index 2fe662eb8..000000000 --- a/packages/service/common/response/constant.ts +++ /dev/null @@ -1,7 +0,0 @@ -export enum sseResponseEventEnum { - error = 'error', - answer = 'answer', // animation stream - response = 'response', // direct response, not animation - moduleStatus = 'moduleStatus', - appStreamResponse = 'appStreamResponse' // sse response request -} diff --git a/packages/service/common/response/index.ts b/packages/service/common/response/index.ts index feb4fa183..53aa65d82 100644 --- a/packages/service/common/response/index.ts +++ 
b/packages/service/common/response/index.ts @@ -1,5 +1,5 @@ import type { NextApiResponse } from 'next'; -import { sseResponseEventEnum } from './constant'; +import { SseResponseEventEnum } from '@fastgpt/global/core/module/runtime/constants'; import { proxyError, ERROR_RESPONSE, ERROR_ENUM } from '@fastgpt/global/common/error/errorCode'; import { addLog } from '../system/log'; import { clearCookie } from '../../support/permission/controller'; @@ -70,7 +70,7 @@ export const sseErrRes = (res: NextApiResponse, error: any) => { return responseWrite({ res, - event: sseResponseEventEnum.error, + event: SseResponseEventEnum.error, data: JSON.stringify(ERROR_RESPONSE[errResponseKey]) }); } @@ -90,7 +90,7 @@ export const sseErrRes = (res: NextApiResponse, error: any) => { responseWrite({ res, - event: sseResponseEventEnum.error, + event: SseResponseEventEnum.error, data: JSON.stringify({ message: replaceSensitiveText(msg) }) }); }; @@ -132,3 +132,22 @@ export function responseWrite({ event && Write(`event: ${event}\n`); Write(`data: ${data}\n\n`); } + +export const responseWriteNodeStatus = ({ + res, + status = 'running', + name +}: { + res?: NextApiResponse; + status?: 'running'; + name: string; +}) => { + responseWrite({ + res, + event: SseResponseEventEnum.flowNodeStatus, + data: JSON.stringify({ + status, + name + }) + }); +}; diff --git a/packages/service/core/ai/functions/createQuestionGuide.ts b/packages/service/core/ai/functions/createQuestionGuide.ts index 127f8eb3a..6b16fca39 100644 --- a/packages/service/core/ai/functions/createQuestionGuide.ts +++ b/packages/service/core/ai/functions/createQuestionGuide.ts @@ -1,4 +1,4 @@ -import type { ChatMessageItemType } from '@fastgpt/global/core/ai/type.d'; +import type { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type.d'; import { getAIApi } from '../config'; import { countGptMessagesTokens } from '@fastgpt/global/common/string/tiktoken'; @@ -8,10 +8,10 @@ export async function createQuestionGuide({ 
messages, model }: { - messages: ChatMessageItemType[]; + messages: ChatCompletionMessageParam[]; model: string; }) { - const concatMessages: ChatMessageItemType[] = [ + const concatMessages: ChatCompletionMessageParam[] = [ ...messages, { role: 'user', diff --git a/packages/service/core/ai/functions/queryExtension.ts b/packages/service/core/ai/functions/queryExtension.ts index 8073955ba..680ff639e 100644 --- a/packages/service/core/ai/functions/queryExtension.ts +++ b/packages/service/core/ai/functions/queryExtension.ts @@ -2,6 +2,7 @@ import { replaceVariable } from '@fastgpt/global/common/string/tools'; import { getAIApi } from '../config'; import { ChatItemType } from '@fastgpt/global/core/chat/type'; import { countGptMessagesTokens } from '@fastgpt/global/common/string/tiktoken'; +import { ChatCompletionMessageParam } from '@fastgpt/global/core/ai/type'; /* query extension - 问题扩展 @@ -133,7 +134,7 @@ A: ${chatBg} histories: concatFewShot }) } - ]; + ] as ChatCompletionMessageParam[]; const result = await ai.chat.completions.create({ model: model, temperature: 0.01, diff --git a/packages/service/core/chat/chatItemSchema.ts b/packages/service/core/chat/chatItemSchema.ts index cd1919af8..65a62ca62 100644 --- a/packages/service/core/chat/chatItemSchema.ts +++ b/packages/service/core/chat/chatItemSchema.ts @@ -10,6 +10,7 @@ import { import { appCollectionName } from '../app/schema'; import { userCollectionName } from '../../support/user/schema'; import { ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants'; +import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants'; export const ChatItemCollectionName = 'chatitems'; @@ -54,8 +55,8 @@ const ChatItemSchema = new Schema({ }, value: { // chat content - type: String, - default: '' + type: Array, + default: [] }, userGoodFeedback: { type: String @@ -75,7 +76,7 @@ const ChatItemSchema = new Schema({ a: String } }, - [ModuleOutputKeyEnum.responseData]: { + 
[DispatchNodeResponseKeyEnum.nodeResponse]: { type: Array, default: [] } diff --git a/packages/service/core/chat/controller.ts b/packages/service/core/chat/controller.ts index 8dde339b6..c382964d6 100644 --- a/packages/service/core/chat/controller.ts +++ b/packages/service/core/chat/controller.ts @@ -1,6 +1,7 @@ -import type { ChatItemType } from '@fastgpt/global/core/chat/type'; +import type { ChatItemType, ChatItemValueItemType } from '@fastgpt/global/core/chat/type'; import { MongoChatItem } from './chatItemSchema'; import { addLog } from '../../common/system/log'; +import { ChatItemValueTypeEnum } from '@fastgpt/global/core/chat/constants'; export async function getChatItems({ appId, @@ -24,8 +25,27 @@ export async function getChatItems({ history.reverse(); + history.forEach((item) => { + // @ts-ignore + item.value = adaptStringValue(item.value); + }); + return { history }; } +/* 临时适配旧的对话记录,清洗完数据后可删除(4.30刪除) */ +export const adaptStringValue = (value: any): ChatItemValueItemType[] => { + if (typeof value === 'string') { + return [ + { + type: ChatItemValueTypeEnum.text, + text: { + content: value + } + } + ]; + } + return value; +}; export const addCustomFeedbacks = async ({ appId, diff --git a/packages/service/core/chat/utils.ts b/packages/service/core/chat/utils.ts index 0f993145d..67db3f8ec 100644 --- a/packages/service/core/chat/utils.ts +++ b/packages/service/core/chat/utils.ts @@ -1,21 +1,40 @@ -import type { ChatItemType } from '@fastgpt/global/core/chat/type.d'; import { ChatRoleEnum, IMG_BLOCK_KEY } from '@fastgpt/global/core/chat/constants'; -import { countMessagesTokens } from '@fastgpt/global/common/string/tiktoken'; -import type { ChatCompletionContentPart } from '@fastgpt/global/core/ai/type.d'; +import { countGptMessagesTokens } from '@fastgpt/global/common/string/tiktoken'; +import type { + ChatCompletionContentPart, + ChatCompletionMessageParam +} from '@fastgpt/global/core/ai/type.d'; import axios from 'axios'; +import { 
ChatCompletionRequestMessageRoleEnum } from '@fastgpt/global/core/ai/constants'; /* slice chat context by tokens */ -export function ChatContextFilter({ +export function filterGPTMessageByMaxTokens({ messages = [], maxTokens }: { - messages: ChatItemType[]; + messages: ChatCompletionMessageParam[]; maxTokens: number; }) { if (!Array.isArray(messages)) { return []; } - const rawTextLen = messages.reduce((sum, item) => sum + item.value.length, 0); + const rawTextLen = messages.reduce((sum, item) => { + if (typeof item.content === 'string') { + return sum + item.content.length; + } + if (Array.isArray(item.content)) { + return ( + sum + + item.content.reduce((sum, item) => { + if (item.type === 'text') { + return sum + item.text.length; + } + return sum; + }, 0) + ); + } + return sum; + }, 0); // If the text length is less than half of the maximum token, no calculation is required if (rawTextLen < maxTokens * 0.5) { @@ -23,19 +42,21 @@ export function ChatContextFilter({ } // filter startWith system prompt - const chatStartIndex = messages.findIndex((item) => item.obj !== ChatRoleEnum.System); - const systemPrompts: ChatItemType[] = messages.slice(0, chatStartIndex); - const chatPrompts: ChatItemType[] = messages.slice(chatStartIndex); + const chatStartIndex = messages.findIndex( + (item) => item.role !== ChatCompletionRequestMessageRoleEnum.System + ); + const systemPrompts: ChatCompletionMessageParam[] = messages.slice(0, chatStartIndex); + const chatPrompts: ChatCompletionMessageParam[] = messages.slice(chatStartIndex); // reduce token of systemPrompt - maxTokens -= countMessagesTokens(systemPrompts); + maxTokens -= countGptMessagesTokens(systemPrompts); // Save the last chat prompt(question) const question = chatPrompts.pop(); if (!question) { return systemPrompts; } - const chats: ChatItemType[] = [question]; + const chats: ChatCompletionMessageParam[] = [question]; // 从后往前截取对话内容, 每次需要截取2个 while (1) { @@ -45,7 +66,7 @@ export function ChatContextFilter({ break; } 
- const tokens = countMessagesTokens([assistant, user]); + const tokens = countGptMessagesTokens([assistant, user]); maxTokens -= tokens; /* 整体 tokens 超出范围,截断 */ if (maxTokens < 0) { @@ -62,6 +83,30 @@ export function ChatContextFilter({ return [...systemPrompts, ...chats]; } +export const formatGPTMessagesInRequestBefore = (messages: ChatCompletionMessageParam[]) => { + return messages + .map((item) => { + if (!item.content) return; + if (typeof item.content === 'string') { + return { + ...item, + content: item.content.trim() + }; + } + + // array + if (item.content.length === 0) return; + if (item.content.length === 1 && item.content[0].type === 'text') { + return { + ...item, + content: item.content[0].text + }; + } + + return item; + }) + .filter(Boolean) as ChatCompletionMessageParam[]; +}; /** string to vision model. Follow the markdown code block rule for interception: @@ -175,3 +220,21 @@ export async function formatStr2ChatContent(str: string) { return content ? content : null; } + +export const loadChatImgToBase64 = async (content: string | ChatCompletionContentPart[]) => { + if (typeof content === 'string') { + return content; + } + return Promise.all( + content.map(async (item) => { + if (item.type === 'text') return item; + // load image + const response = await axios.get(item.image_url.url, { + responseType: 'arraybuffer' + }); + const base64 = Buffer.from(response.data).toString('base64'); + item.image_url.url = `data:${response.headers['content-type']};base64,${base64}`; + return item; + }) + ); +}; diff --git a/packages/service/support/outLink/tools.ts b/packages/service/support/outLink/tools.ts index 878234c63..ce337ff53 100644 --- a/packages/service/support/outLink/tools.ts +++ b/packages/service/support/outLink/tools.ts @@ -25,12 +25,12 @@ export const pushResult2Remote = async ({ outLinkUid, shareId, appName, - responseData + flowResponses }: { outLinkUid?: string; // raw id, not parse shareId?: string; appName: string; - responseData?: 
ChatHistoryItemResType[]; + flowResponses?: ChatHistoryItemResType[]; }) => { if (!shareId || !outLinkUid || !FastGPTProUrl) return; try { @@ -46,7 +46,7 @@ export const pushResult2Remote = async ({ data: { token: outLinkUid, appName, - responseData + responseData: flowResponses } }); } catch (error) {} diff --git a/packages/web/components/common/CustomModal/index.tsx b/packages/web/components/common/CustomModal/index.tsx index 1549965f3..2e42b1b12 100644 --- a/packages/web/components/common/CustomModal/index.tsx +++ b/packages/web/components/common/CustomModal/index.tsx @@ -7,7 +7,8 @@ import { ModalCloseButton, ModalContentProps, Box, - Image + Image, + useMediaQuery } from '@chakra-ui/react'; import MyIcon from '../Icon'; @@ -31,12 +32,14 @@ const CustomModal = ({ maxW = ['90vw', '600px'], ...props }: MyModalProps) => { + const [isPc] = useMediaQuery('(min-width: 900px)'); + return ( onClose && onClose()} autoFocus={false} - isCentered={isCentered} + isCentered={isPc ? isCentered : true} > import('./icons/collectionLight.svg'), collectionSolid: () => import('./icons/collectionSolid.svg'), 'common/addCircleLight': () => import('./icons/common/addCircleLight.svg'), + 'common/addLight': () => import('./icons/common/addLight.svg'), 'common/backFill': () => import('./icons/common/backFill.svg'), 'common/backLight': () => import('./icons/common/backLight.svg'), 'common/clearLight': () => import('./icons/common/clearLight.svg'), diff --git a/packages/web/components/common/Icon/icons/common/addLight.svg b/packages/web/components/common/Icon/icons/common/addLight.svg new file mode 100644 index 000000000..6600df543 --- /dev/null +++ b/packages/web/components/common/Icon/icons/common/addLight.svg @@ -0,0 +1,4 @@ + + + \ No newline at end of file diff --git a/packages/web/components/common/Icon/icons/common/retryLight.svg b/packages/web/components/common/Icon/icons/common/retryLight.svg index 97b185b40..222bbdb86 100644 --- 
a/packages/web/components/common/Icon/icons/common/retryLight.svg +++ b/packages/web/components/common/Icon/icons/common/retryLight.svg @@ -1,8 +1,4 @@ - - + + d="M4.82661 10.9785C3.86099 10.7071 3.07349 10.1763 2.46411 9.38585C1.85474 8.59543 1.55005 7.68544 1.55005 6.65587C1.55005 6.12256 1.63911 5.61489 1.81724 5.13286C1.99536 4.65082 2.24849 4.20883 2.57661 3.80688C2.67974 3.69461 2.8063 3.63604 2.9563 3.63117C3.1063 3.62631 3.24224 3.68488 3.36411 3.80688C3.46724 3.9098 3.52124 4.03611 3.52611 4.18581C3.53099 4.33551 3.48167 4.47586 3.37817 4.60685C3.15317 4.89689 2.97974 5.215 2.85786 5.56119C2.73599 5.90737 2.67505 6.27226 2.67505 6.65587C2.67505 7.41373 2.8978 8.08981 3.3433 8.68413C3.7888 9.27844 4.36292 9.683 5.06567 9.89782C5.18755 9.93525 5.28824 10.0054 5.36774 10.1083C5.44724 10.2113 5.48717 10.3235 5.48755 10.4452C5.48755 10.6323 5.42192 10.7797 5.29067 10.8875C5.15942 10.9953 5.00474 11.0256 4.82661 10.9785ZM7.27349 10.9785C7.09536 11.0253 6.94067 10.9925 6.80942 10.8802C6.67817 10.768 6.61255 10.6183 6.61255 10.4311C6.61255 10.3189 6.65249 10.2113 6.73236 10.1083C6.81224 10.0054 6.91292 9.93525 7.03442 9.89782C7.73755 9.67327 8.31186 9.26627 8.75736 8.67683C9.20286 8.08738 9.42542 7.41373 9.42505 6.65587C9.42505 5.72024 9.09692 4.92496 8.44067 4.27002C7.78442 3.61508 6.98755 3.28761 6.05005 3.28761H6.00786L6.23286 3.51216C6.33599 3.61508 6.38755 3.74607 6.38755 3.90512C6.38755 4.06418 6.33599 4.19517 6.23286 4.29809C6.12974 4.40101 5.99849 4.45247 5.83911 4.45247C5.67974 4.45247 5.54849 4.40101 5.44536 4.29809L4.26411 3.1192C4.20786 3.06306 4.16811 3.00224 4.14486 2.93675C4.12161 2.87126 4.1098 2.80108 4.10942 2.72623C4.10942 2.65138 4.12124 2.58121 4.14486 2.51572C4.16849 2.45022 4.20824 2.38941 4.26411 2.33327L5.44536 1.15438C5.54849 1.05146 5.67974 1 5.83911 1C5.99849 1 6.12974 1.05146 6.23286 1.15438C6.33599 1.2573 6.38755 1.38829 6.38755 1.54734C6.38755 1.7064 6.33599 1.83739 6.23286 1.94031L6.00786 2.16486H6.05005C7.3063 2.16486 8.37036 
2.59992 9.24224 3.47006C10.1141 4.34019 10.55 5.40213 10.55 6.65587C10.55 7.67571 10.2454 8.58326 9.63599 9.37855C9.02661 10.1738 8.23911 10.7071 7.27349 10.9785Z" /> \ No newline at end of file diff --git a/packages/web/components/common/Icon/icons/copy.svg b/packages/web/components/common/Icon/icons/copy.svg index 5e0e79bd3..01be11a7e 100644 --- a/packages/web/components/common/Icon/icons/copy.svg +++ b/packages/web/components/common/Icon/icons/copy.svg @@ -1 +1,8 @@ - \ No newline at end of file + + + + \ No newline at end of file diff --git a/packages/web/components/common/Icon/icons/core/chat/feedback/badLight.svg b/packages/web/components/common/Icon/icons/core/chat/feedback/badLight.svg index 6b164484c..7924772ae 100644 --- a/packages/web/components/common/Icon/icons/core/chat/feedback/badLight.svg +++ b/packages/web/components/common/Icon/icons/core/chat/feedback/badLight.svg @@ -1,8 +1,4 @@ - - - + + \ No newline at end of file diff --git a/packages/web/components/common/Icon/icons/core/chat/feedback/goodLight.svg b/packages/web/components/common/Icon/icons/core/chat/feedback/goodLight.svg index df343ebea..f2c2cc8db 100644 --- a/packages/web/components/common/Icon/icons/core/chat/feedback/goodLight.svg +++ b/packages/web/components/common/Icon/icons/core/chat/feedback/goodLight.svg @@ -1,9 +1,4 @@ - - - + + \ No newline at end of file diff --git a/packages/web/components/common/Icon/icons/delete.svg b/packages/web/components/common/Icon/icons/delete.svg index 60df09a4b..b46ee1b7c 100644 --- a/packages/web/components/common/Icon/icons/delete.svg +++ b/packages/web/components/common/Icon/icons/delete.svg @@ -1 +1,4 @@ - \ No newline at end of file + + + \ No newline at end of file diff --git a/packages/web/components/common/Icon/icons/more.svg b/packages/web/components/common/Icon/icons/more.svg index fac518301..18a3ead84 100644 --- a/packages/web/components/common/Icon/icons/more.svg +++ b/packages/web/components/common/Icon/icons/more.svg @@ -1 +1,14 @@ - 
\ No newline at end of file + + + + + + \ No newline at end of file diff --git a/packages/web/components/common/MyTooltip/index.tsx b/packages/web/components/common/MyTooltip/index.tsx index 90f0ee128..cd1e9f417 100644 --- a/packages/web/components/common/MyTooltip/index.tsx +++ b/packages/web/components/common/MyTooltip/index.tsx @@ -1,12 +1,14 @@ import React from 'react'; -import { Tooltip, TooltipProps } from '@chakra-ui/react'; +import { Tooltip, TooltipProps, useMediaQuery } from '@chakra-ui/react'; interface Props extends TooltipProps { forceShow?: boolean; } -const MyTooltip = ({ children, shouldWrapChildren = true, ...props }: Props) => { - return ( +const MyTooltip = ({ children, forceShow = false, shouldWrapChildren = true, ...props }: Props) => { + const [isPc] = useMediaQuery('(min-width: 900px)'); + + return isPc || forceShow ? ( > {children} + ) : ( + <>{children} ); }; diff --git a/packages/web/components/common/Tag/Fill.tsx b/packages/web/components/common/Tag/Fill.tsx new file mode 100644 index 000000000..2752df43e --- /dev/null +++ b/packages/web/components/common/Tag/Fill.tsx @@ -0,0 +1,48 @@ +import React, { useMemo } from 'react'; +import { Flex, type FlexProps } from '@chakra-ui/react'; + +interface Props extends FlexProps { + children: React.ReactNode | React.ReactNode[]; + colorSchema?: 'blue' | 'green' | 'gray' | 'purple'; +} + +const FillTag = ({ children, colorSchema = 'blue', ...props }: Props) => { + const theme = useMemo(() => { + const map = { + blue: { + bg: 'primary.50', + color: 'primary.600' + }, + green: { + bg: 'green.50', + color: 'green.600' + }, + purple: { + bg: '#F6EEFA', + color: '#A558C9' + }, + gray: { + bg: 'myGray.50', + color: 'myGray.700' + } + }; + return map[colorSchema]; + }, [colorSchema]); + + return ( + + {children} + + ); +}; + +export default FillTag; diff --git a/packages/web/hooks/useEditTextarea.tsx b/packages/web/hooks/useEditTextarea.tsx new file mode 100644 index 000000000..a62ea70e2 --- /dev/null +++ 
b/packages/web/hooks/useEditTextarea.tsx @@ -0,0 +1,129 @@ +import React, { useCallback, useRef } from 'react'; +import { + ModalFooter, + ModalBody, + Input, + useDisclosure, + Button, + Box, + Textarea +} from '@chakra-ui/react'; +import MyModal from '../components/common/CustomModal'; +import { useToast } from './useToast'; +import { useTranslation } from 'next-i18next'; + +export const useEditTextarea = ({ + title, + tip, + placeholder = '', + canEmpty = true, + valueRule +}: { + title: string; + tip?: string; + placeholder?: string; + canEmpty?: boolean; + valueRule?: (val: string) => string | void; +}) => { + const { t } = useTranslation(); + const { isOpen, onOpen, onClose } = useDisclosure(); + + const textareaRef = useRef(null); + const onSuccessCb = useRef<(content: string) => void | Promise>(); + const onErrorCb = useRef<(err: any) => void>(); + const { toast } = useToast(); + const defaultValue = useRef(''); + + const onOpenModal = useCallback( + ({ + defaultVal, + onSuccess, + onError + }: { + defaultVal: string; + onSuccess: (content: string) => any; + onError?: (err: any) => void; + }) => { + onOpen(); + onSuccessCb.current = onSuccess; + onErrorCb.current = onError; + defaultValue.current = defaultVal; + }, + [onOpen] + ); + + const onclickConfirm = useCallback(async () => { + if (!textareaRef.current || !onSuccessCb.current) return; + const val = textareaRef.current.value; + + if (!canEmpty && !val) { + textareaRef.current.focus(); + return; + } + + if (valueRule) { + const result = valueRule(val); + if (result) { + return toast({ + status: 'warning', + title: result + }); + } + } + + try { + await onSuccessCb.current(val); + + onClose(); + } catch (err) { + onErrorCb.current?.(err); + } + }, [canEmpty, onClose]); + + // eslint-disable-next-line react/display-name + const EditModal = useCallback( + ({ + maxLength = 30, + iconSrc = 'modal/edit', + closeBtnText = t('common.Close') + }: { + maxLength?: number; + iconSrc?: string; + closeBtnText?: 
string; + }) => ( + + + {!!tip && ( + + {tip} + + )} + + + + + {!!closeBtnText && ( + + )} + + + + ), + [isOpen, onClose, onclickConfirm, placeholder, tip, title] + ); + + return { + onOpenModal, + EditModal + }; +}; diff --git a/packages/web/hooks/usePagination.tsx b/packages/web/hooks/usePagination.tsx index be273341b..056105436 100644 --- a/packages/web/hooks/usePagination.tsx +++ b/packages/web/hooks/usePagination.tsx @@ -5,6 +5,7 @@ import { useMutation } from '@tanstack/react-query'; import { throttle } from 'lodash'; import { useToast } from './useToast'; +import { getErrText } from '@fastgpt/global/common/error/utils'; const thresholdVal = 100; @@ -62,7 +63,7 @@ export function usePagination({ onChange && onChange(num); } catch (error: any) { toast({ - title: error?.message || '获取数据异常', + title: getErrText(error, '获取数据异常'), status: 'error' }); console.log(error); diff --git a/packages/web/hooks/useToast.ts b/packages/web/hooks/useToast.ts index 217321c97..55ff6a756 100644 --- a/packages/web/hooks/useToast.ts +++ b/packages/web/hooks/useToast.ts @@ -1,13 +1,21 @@ import { useToast as uToast, UseToastOptions } from '@chakra-ui/react'; +import { useCallback, useMemo } from 'react'; export const useToast = (props?: UseToastOptions) => { const toast = uToast({ position: 'top', duration: 2000, - ...(props && props) + ...props }); + const myToast = useCallback( + (options?: UseToastOptions) => { + toast(options); + }, + [props] + ); + return { - toast + toast: myToast }; }; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 3e7caaf7d..4e9df22ba 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -51,8 +51,8 @@ importers: specifier: ^4.0.1 version: 4.0.1 openai: - specifier: 4.23.0 - version: 4.23.0(encoding@0.1.13) + specifier: 4.28.0 + version: 4.28.0(encoding@0.1.13) timezones-list: specifier: ^3.0.2 version: 3.0.2 @@ -9406,8 +9406,8 @@ packages: mimic-fn: 4.0.0 dev: true - /openai@4.23.0(encoding@0.1.13): - resolution: {integrity: 
sha512-ey2CXh1OTcTUa0AWZWuTpgA9t5GuAG3DVU1MofCRUI7fQJij8XJ3Sr0VtgxoAE69C9wbHBMCux8Z/IQZfSwHiA==} + /openai@4.28.0(encoding@0.1.13): + resolution: {integrity: sha512-JM8fhcpmpGN0vrUwGquYIzdcEQHtFuom6sRCbbCM6CfzZXNuRk33G7KfeRAIfnaCxSpzrP5iHtwJzIm6biUZ2Q==} hasBin: true dependencies: '@types/node': 18.19.21 diff --git a/projects/app/data/config.json b/projects/app/data/config.json index 21cc58981..1c4d4c8df 100644 --- a/projects/app/data/config.json +++ b/projects/app/data/config.json @@ -7,7 +7,7 @@ }, "llmModels": [ { - "model": "gpt-3.5-turbo-1106", + "model": "gpt-3.5-turbo", "name": "gpt-3.5-turbo", "maxContext": 16000, "maxResponse": 4000, @@ -17,6 +17,10 @@ "censor": false, "vision": false, "datasetProcess": false, + "usedInClassify": true, + "usedInExtractFields": true, + "useInToolCall": true, + "usedInQueryExtension": true, "toolChoice": true, "functionCall": false, "customCQPrompt": "", @@ -35,6 +39,10 @@ "censor": false, "vision": false, "datasetProcess": true, + "usedInClassify": true, + "usedInExtractFields": true, + "useInToolCall": true, + "usedInQueryExtension": true, "toolChoice": true, "functionCall": false, "customCQPrompt": "", @@ -53,6 +61,10 @@ "censor": false, "vision": false, "datasetProcess": false, + "usedInClassify": true, + "usedInExtractFields": true, + "useInToolCall": true, + "usedInQueryExtension": true, "toolChoice": true, "functionCall": false, "customCQPrompt": "", @@ -71,6 +83,10 @@ "censor": false, "vision": true, "datasetProcess": false, + "usedInClassify": false, + "usedInExtractFields": false, + "useInToolCall": false, + "usedInQueryExtension": false, "toolChoice": true, "functionCall": false, "customCQPrompt": "", diff --git a/projects/app/package.json b/projects/app/package.json index aac4c38cd..51c53bb5d 100644 --- a/projects/app/package.json +++ b/projects/app/package.json @@ -1,6 +1,6 @@ { "name": "app", - "version": "4.6.9", + "version": "4.7", "private": false, "scripts": { "dev": "next dev", diff --git 
a/projects/app/public/docs/versionIntro.md b/projects/app/public/docs/versionIntro.md index 166d0c410..7c5bcfff2 100644 --- a/projects/app/public/docs/versionIntro.md +++ b/projects/app/public/docs/versionIntro.md @@ -1,11 +1,7 @@ -### Fast GPT V4.6.9 +### FastGPT V4.7 -1. 新增 - 知识库新增“增强处理”训练模式,可生成更多类型索引。 -2. 新增 - 完善了HTTP模块的变量提示。 -3. 新增 - HTTP模块支持OpenAI单接口导入。 -4. 新增 - 全局变量支持增加外部变量。可通过分享链接的Query或 API 的 variables 参数传入。 -5. 新增 - 内容提取模块增加默认值。 -6. 优化 - 问题补全。增加英文类型。同时可以设置为单独模块,方便复用。 -7. [点击查看高级编排介绍文档](https://doc.fastgpt.in/docs/workflow/intro) -8. [使用文档](https://doc.fastgpt.in/docs/intro/) -9. [点击查看商业版](https://doc.fastgpt.in/docs/commercial/) \ No newline at end of file +1. 新增 - 工具调用模块,可以让LLM模型根据用户意图,动态的选择其他模型或插件执行。 +2. 优化 - 高级编排性能 +3. [点击查看高级编排介绍文档](https://doc.fastgpt.in/docs/workflow/intro) +4. [使用文档](https://doc.fastgpt.in/docs/intro/) +5. [点击查看商业版](https://doc.fastgpt.in/docs/commercial/) \ No newline at end of file diff --git a/projects/app/public/imgs/module/tool.svg b/projects/app/public/imgs/module/tool.svg new file mode 100644 index 000000000..eb03b6027 --- /dev/null +++ b/projects/app/public/imgs/module/tool.svg @@ -0,0 +1,12 @@ + + + + + + \ No newline at end of file diff --git a/projects/app/public/locales/en/common.json b/projects/app/public/locales/en/common.json index 162667aac..c40bcdb6e 100644 --- a/projects/app/public/locales/en/common.json +++ b/projects/app/public/locales/en/common.json @@ -89,6 +89,7 @@ "Edit": "Edit", "Exit": "Exit", "Expired Time": "Expired", + "Field": "Field", "File": "File", "Filed is repeat": "Filed is repeated", "Filed is repeated": "", @@ -409,7 +410,7 @@ "Speaking": "I'm listening...", "Start Chat": "Start Chat", "Stop Speak": "Stop Speak", - "Type a message": "Input problem", + "Type a message": "Enter your question here", "Unpin": "Unpin", "You need to a chat app": "You don't have apps available", "error": { @@ -444,9 +445,11 @@ "response": { "Complete Response": "Complete Response", "Extension model": "Extension model", 
- "Plugin Resonse Detail": "Plugin Detail", + "Plugin response detail": "Plugin Detail", "Read complete response": "Read Detail", "Read complete response tips": "Click to see the detailed process", + "Tool call response detail": "Tool call detail", + "Tool call tokens": "Tool call tokens", "context total length": "Context Length", "module cq": "Question classification list", "module cq result": "Classification Result", @@ -776,6 +779,7 @@ }, "Default value": "Default ", "Default value placeholder": "Null characters are returned by default", + "Edit intro": "Edit", "Field Description": "Description", "Field Name": "Name", "Field Type": "Type", @@ -933,6 +937,9 @@ "textEditor": { "Text Edit": "Text Edit" }, + "tool": { + "Tool input": "Tool input" + }, "valueType": { "any": "Any", "boolean": "Boolean", @@ -942,7 +949,8 @@ "number": "Number", "selectApp": "Select App", "selectDataset": "Select Dataset", - "string": "String" + "string": "String", + "tools": "tools" }, "variable": { "External type": "External", diff --git a/projects/app/public/locales/zh/common.json b/projects/app/public/locales/zh/common.json index 19990533f..8b4b9ee5c 100644 --- a/projects/app/public/locales/zh/common.json +++ b/projects/app/public/locales/zh/common.json @@ -89,6 +89,7 @@ "Edit": "编辑", "Exit": "退出", "Expired Time": "过期时间", + "Field": "字段", "File": "文件", "Filed is repeat": "", "Filed is repeated": "字段重复了", @@ -444,9 +445,11 @@ "response": { "Complete Response": "完整响应", "Extension model": "问题优化模型", - "Plugin Resonse Detail": "插件详情", + "Plugin response detail": "插件详情", "Read complete response": "查看详情", "Read complete response tips": "点击查看详细流程", + "Tool call response detail": "工具运行详情", + "Tool call tokens": "工具调用Tokens消耗", "context total length": "上下文总长度", "module cq": "问题分类列表", "module cq result": "分类结果", @@ -778,10 +781,11 @@ }, "Default value": "默认值", "Default value placeholder": "不填则默认返回空字符", + "Edit intro": "编辑描述", "Field Description": "字段描述", "Field Name": "字段名", "Field Type": 
"字段类型", - "Field key": "字段 Key", + "Field key": "字段Key", "Http request props": "请求参数", "Http request settings": "请求配置", "Input Type": "输入类型", @@ -935,6 +939,9 @@ "textEditor": { "Text Edit": "文本加工" }, + "tool": { + "Tool input": "工具输入" + }, "valueType": { "any": "任意", "boolean": "布尔", @@ -944,7 +951,8 @@ "number": "数字", "selectApp": "应用选择", "selectDataset": "知识库选择", - "string": "字符串" + "string": "字符串", + "tools": "工具调用" }, "variable": { "External type": "外部传入", diff --git a/projects/app/src/components/ChatBox/ContextModal.tsx b/projects/app/src/components/ChatBox/ContextModal.tsx index 903984373..91cfe92c9 100644 --- a/projects/app/src/components/ChatBox/ContextModal.tsx +++ b/projects/app/src/components/ChatBox/ContextModal.tsx @@ -1,13 +1,13 @@ import React from 'react'; -import { ModalBody, Box, useTheme, Flex, Image } from '@chakra-ui/react'; -import { ChatItemType } from '@fastgpt/global/core/chat/type'; +import { ModalBody, Box, useTheme } from '@chakra-ui/react'; import MyModal from '../MyModal'; +import { DispatchNodeResponseType } from '@fastgpt/global/core/module/runtime/type.d'; const ContextModal = ({ context = [], onClose }: { - context: ChatItemType[]; + context: DispatchNodeResponseType['historyPreview']; onClose: () => void; }) => { const theme = useTheme(); @@ -17,7 +17,7 @@ const ContextModal = ({ isOpen={true} onClose={onClose} iconSrc="/imgs/modal/chatHistory.svg" - title={`完整对话记录(${context.length}条)`} + title={`上下文预览(${context.length}条)`} h={['90vh', '80vh']} minW={['90vw', '600px']} isCentered diff --git a/projects/app/src/components/ChatBox/MessageInput.tsx b/projects/app/src/components/ChatBox/MessageInput.tsx index 5a49bb0e3..3b637fc2c 100644 --- a/projects/app/src/components/ChatBox/MessageInput.tsx +++ b/projects/app/src/components/ChatBox/MessageInput.tsx @@ -1,36 +1,24 @@ import { useSpeech } from '@/web/common/hooks/useSpeech'; import { useSystemStore } from '@/web/common/system/useSystemStore'; import { Box, Flex, Image, Spinner, 
Textarea } from '@chakra-ui/react'; -import React, { useRef, useEffect, useCallback, useState, useTransition } from 'react'; +import React, { useRef, useEffect, useCallback, useMemo } from 'react'; import { useTranslation } from 'next-i18next'; import MyTooltip from '../MyTooltip'; import MyIcon from '@fastgpt/web/components/common/Icon'; -import { useRouter } from 'next/router'; import { useSelectFile } from '@/web/common/file/hooks/useSelectFile'; import { compressImgFileAndUpload } from '@/web/common/file/controller'; import { customAlphabet } from 'nanoid'; -import { IMG_BLOCK_KEY } from '@fastgpt/global/core/chat/constants'; +import { ChatFileTypeEnum } from '@fastgpt/global/core/chat/constants'; import { addDays } from 'date-fns'; import { useRequest } from '@/web/common/hooks/useRequest'; import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants'; import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat'; +import { ChatBoxInputFormType, ChatBoxInputType, UserInputFileItemType } from './type'; +import { textareaMinH } from './constants'; +import { UseFormReturn, useFieldArray } from 'react-hook-form'; const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 6); -enum FileTypeEnum { - image = 'image', - file = 'file' -} -type FileItemType = { - id: string; - rawFile: File; - type: `${FileTypeEnum}`; - name: string; - icon: string; // img is base64 - src?: string; -}; - const MessageInput = ({ - onChange, onSendMessage, onStop, isChatting, @@ -40,17 +28,29 @@ const MessageInput = ({ shareId, outLinkUid, teamId, - teamToken + teamToken, + chatForm }: OutLinkChatAuthProps & { - onChange?: (e: string) => void; - onSendMessage: (e: string) => void; + onSendMessage: (val: ChatBoxInputType) => void; onStop: () => void; isChatting: boolean; showFileSelector?: boolean; TextareaDom: React.MutableRefObject; - resetInputVal: (val: string) => void; + resetInputVal: (val: ChatBoxInputType) => void; + chatForm: 
UseFormReturn; }) => { - const [, startSts] = useTransition(); + const { setValue, watch, control } = chatForm; + const inputValue = watch('input'); + const { + update: updateFile, + remove: removeFile, + fields: fileList, + append: appendFile, + replace: replaceFile + } = useFieldArray({ + control, + name: 'files' + }); const { isSpeaking, @@ -64,45 +64,38 @@ const MessageInput = ({ const { isPc } = useSystemStore(); const canvasRef = useRef(null); const { t } = useTranslation(); - const textareaMinH = '22px'; - const [fileList, setFileList] = useState([]); - const havInput = !!TextareaDom.current?.value || fileList.length > 0; + const havInput = !!inputValue || fileList.length > 0; + + /* file selector and upload */ const { File, onOpen: onOpenSelectFile } = useSelectFile({ fileType: 'image/*', multiple: true, maxCount: 10 }); - const { mutate: uploadFile } = useRequest({ - mutationFn: async (file: FileItemType) => { - if (file.type === FileTypeEnum.image) { + mutationFn: async ({ file, fileIndex }: { file: UserInputFileItemType; fileIndex: number }) => { + if (file.type === ChatFileTypeEnum.image && file.rawFile) { try { - const src = await compressImgFileAndUpload({ + const url = await compressImgFileAndUpload({ type: MongoImageTypeEnum.chatImage, file: file.rawFile, maxW: 4329, maxH: 4329, maxSize: 1024 * 1024 * 5, - // 30 day expired. + // 7 day expired. expiredTime: addDays(new Date(), 7), shareId, outLinkUid, teamId, teamToken }); - setFileList((state) => - state.map((item) => - item.id === file.id - ? 
{ - ...item, - src: `${location.origin}${src}` - } - : item - ) - ); + updateFile(fileIndex, { + ...file, + url: `${location.origin}${url}` + }); } catch (error) { - setFileList((state) => state.filter((item) => item.id !== file.id)); + removeFile(fileIndex); console.log(error); return Promise.reject(error); } @@ -110,7 +103,6 @@ const MessageInput = ({ }, errorToast: t('common.Upload File Failed') }); - const onSelectFile = useCallback( async (files: File[]) => { if (!files || files.length === 0) { @@ -119,7 +111,7 @@ const MessageInput = ({ const loadFiles = await Promise.all( files.map( (file) => - new Promise((resolve, reject) => { + new Promise((resolve, reject) => { if (file.type.includes('image')) { const reader = new FileReader(); reader.readAsDataURL(file); @@ -127,11 +119,10 @@ const MessageInput = ({ const item = { id: nanoid(), rawFile: file, - type: FileTypeEnum.image, + type: ChatFileTypeEnum.image, name: file.name, icon: reader.result as string }; - uploadFile(item); resolve(item); }; reader.onerror = () => { @@ -141,7 +132,7 @@ const MessageInput = ({ resolve({ id: nanoid(), rawFile: file, - type: FileTypeEnum.file, + type: ChatFileTypeEnum.file, name: file.name, icon: 'file/pdf' }); @@ -149,29 +140,28 @@ const MessageInput = ({ }) ) ); + appendFile(loadFiles); - setFileList((state) => [...state, ...loadFiles]); + loadFiles.forEach((file, i) => + uploadFile({ + file, + fileIndex: i + fileList.length + }) + ); }, - [uploadFile] + [appendFile, fileList.length, uploadFile] ); + /* on send */ const handleSend = useCallback(async () => { const textareaValue = TextareaDom.current?.value || ''; - const images = fileList.filter((item) => item.type === FileTypeEnum.image); - const imagesText = - images.length === 0 - ? 
'' - : `\`\`\`${IMG_BLOCK_KEY} -${images.map((img) => JSON.stringify({ src: img.src })).join('\n')} -\`\`\` -`; - - const inputMessage = `${imagesText}${textareaValue}`; - - onSendMessage(inputMessage); - setFileList([]); - }, [TextareaDom, fileList, onSendMessage]); + onSendMessage({ + text: textareaValue.trim(), + files: fileList + }); + replaceFile([]); + }, [TextareaDom, fileList, onSendMessage, replaceFile]); useEffect(() => { if (!stream) { @@ -231,7 +221,7 @@ ${images.map((img) => JSON.stringify({ src: img.src })).join('\n')} {/* file preview */} - {fileList.map((item) => ( + {fileList.map((item, index) => ( JSON.stringify({ src: img.src })).join('\n')} rounded={'md'} position={'relative'} _hover={{ - '.close-icon': { display: item.src ? 'block' : 'none' } + '.close-icon': { display: item.url ? 'block' : 'none' } }} > {/* uploading */} - {!item.src && ( + {!item.url && ( JSON.stringify({ src: img.src })).join('\n')} right={'-8px'} top={'-8px'} onClick={() => { - setFileList((state) => state.filter((file) => file.id !== item.id)); + removeFile(index); }} className="close-icon" display={['', 'none']} /> - {item.type === FileTypeEnum.image && ( + {item.type === ChatFileTypeEnum.image && ( JSON.stringify({ src: img.src })).join('\n')} boxShadow={'none !important'} color={'myGray.900'} isDisabled={isSpeaking} + value={inputValue} onChange={(e) => { const textarea = e.target; textarea.style.height = textareaMinH; textarea.style.height = `${textarea.scrollHeight}px`; - - startSts(() => { - onChange?.(textarea.value); - }); + setValue('input', textarea.value); }} onKeyDown={(e) => { // enter send.(pc or iframe && enter and unPress shift) @@ -406,7 +394,7 @@ ${images.map((img) => JSON.stringify({ src: img.src })).join('\n')} if (isSpeaking) { return stopSpeak(); } - startSpeak(resetInputVal); + startSpeak((text) => resetInputVal({ text })); }} > diff --git a/projects/app/src/components/ChatBox/ResponseTags.tsx b/projects/app/src/components/ChatBox/ResponseTags.tsx 
index 87e72b363..8a91a2861 100644 --- a/projects/app/src/components/ChatBox/ResponseTags.tsx +++ b/projects/app/src/components/ChatBox/ResponseTags.tsx @@ -1,5 +1,6 @@ import React, { useMemo, useState } from 'react'; -import type { ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d'; +import { type ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d'; +import { DispatchNodeResponseType } from '@fastgpt/global/core/module/runtime/type.d'; import type { ChatItemType } from '@fastgpt/global/core/chat/type'; import { Flex, BoxProps, useDisclosure, useTheme, Box } from '@chakra-ui/react'; import { useTranslation } from 'next-i18next'; @@ -14,15 +15,18 @@ import ChatBoxDivider from '@/components/core/chat/Divider'; import { strIsLink } from '@fastgpt/global/common/string/tools'; import MyIcon from '@fastgpt/web/components/common/Icon'; -const QuoteModal = dynamic(() => import('./QuoteModal'), { ssr: false }); -const ContextModal = dynamic(() => import('./ContextModal'), { ssr: false }); -const WholeResponseModal = dynamic(() => import('./WholeResponseModal'), { ssr: false }); +const QuoteModal = dynamic(() => import('./QuoteModal')); +const ContextModal = dynamic(() => import('./ContextModal')); +const WholeResponseModal = dynamic(() => import('./WholeResponseModal')); + +const isLLMNode = (item: ChatHistoryItemResType) => + item.moduleType === FlowNodeTypeEnum.chatNode || item.moduleType === FlowNodeTypeEnum.tools; const ResponseTags = ({ - responseData = [], + flowResponses = [], showDetail }: { - responseData?: ChatHistoryItemResType[]; + flowResponses?: ChatHistoryItemResType[]; showDetail: boolean; }) => { const theme = useTheme(); @@ -36,7 +40,8 @@ const ResponseTags = ({ sourceName: string; }; }>(); - const [contextModalData, setContextModalData] = useState(); + const [contextModalData, setContextModalData] = + useState(); const { isOpen: isOpenWholeModal, onOpen: onOpenWholeModal, @@ -44,18 +49,29 @@ const ResponseTags = ({ } = 
useDisclosure(); const { - chatAccount, + llmModuleAccount, quoteList = [], sourceList = [], historyPreview = [], runningTime = 0 } = useMemo(() => { - const chatData = responseData.find((item) => item.moduleType === FlowNodeTypeEnum.chatNode); - const quoteList = responseData - .filter((item) => item.moduleType === FlowNodeTypeEnum.chatNode) + const flatResponse = flowResponses + .map((item) => { + if (item.pluginDetail || item.toolDetail) { + return [item, ...(item.pluginDetail || []), ...(item.toolDetail || [])]; + } + return item; + }) + .flat(); + + const chatData = flatResponse.find(isLLMNode); + + const quoteList = flatResponse + .filter((item) => item.moduleType === FlowNodeTypeEnum.datasetSearchNode) .map((item) => item.quoteList) .flat() .filter(Boolean) as SearchDataResponseItemType[]; + const sourceList = quoteList.reduce( (acc: Record, cur) => { if (!acc[cur.collectionId]) { @@ -67,8 +83,7 @@ const ResponseTags = ({ ); return { - chatAccount: responseData.filter((item) => item.moduleType === FlowNodeTypeEnum.chatNode) - .length, + llmModuleAccount: flatResponse.filter(isLLMNode).length, quoteList, sourceList: Object.values(sourceList) .flat() @@ -80,16 +95,16 @@ const ResponseTags = ({ collectionId: item.collectionId })), historyPreview: chatData?.historyPreview, - runningTime: +responseData.reduce((sum, item) => sum + (item.runningTime || 0), 0).toFixed(2) + runningTime: +flowResponses.reduce((sum, item) => sum + (item.runningTime || 0), 0).toFixed(2) }; - }, [showDetail, responseData]); + }, [showDetail, flowResponses]); const TagStyles: BoxProps = { mr: 2, bg: 'transparent' }; - return responseData.length === 0 ? null : ( + return flowResponses.length === 0 ? 
null : ( <> {sourceList.length > 0 && ( <> @@ -148,10 +163,10 @@ const ResponseTags = ({ )} - {chatAccount === 1 && ( + {llmModuleAccount === 1 && ( <> {historyPreview.length > 0 && ( - + )} - {chatAccount > 1 && ( + {llmModuleAccount > 1 && ( 多组 AI 对话 @@ -196,7 +211,7 @@ const ResponseTags = ({ )} {isOpenWholeModal && ( diff --git a/projects/app/src/components/ChatBox/WholeResponseModal.tsx b/projects/app/src/components/ChatBox/WholeResponseModal.tsx index be972471d..c5224c505 100644 --- a/projects/app/src/components/ChatBox/WholeResponseModal.tsx +++ b/projects/app/src/components/ChatBox/WholeResponseModal.tsx @@ -2,7 +2,7 @@ import React, { useMemo, useState } from 'react'; import { Box, useTheme, Flex, Image } from '@chakra-ui/react'; import type { ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d'; import { useTranslation } from 'next-i18next'; -import { moduleTemplatesFlat } from '@/web/core/modules/template/system'; +import { moduleTemplatesFlat } from '@fastgpt/global/core/module/template/constants'; import Tabs from '../Tabs'; import MyModal from '../MyModal'; @@ -143,6 +143,11 @@ const ResponseBox = React.memo(function ResponseBox({ /> + + - {activeModule.quoteList && activeModule.quoteList.length > 0 && ( - } - /> - )} {/* dataset search */} @@ -213,6 +212,12 @@ const ResponseBox = React.memo(function ResponseBox({ label={t('support.wallet.usage.Extension result')} value={`${activeModule?.extensionResult}`} /> + {activeModule.quoteList && activeModule.quoteList.length > 0 && ( + } + /> + )} {/* classify question */} @@ -276,7 +281,7 @@ const ResponseBox = React.memo(function ResponseBox({ )} {activeModule?.pluginDetail && activeModule?.pluginDetail.length > 0 && ( } /> )} @@ -284,6 +289,14 @@ const ResponseBox = React.memo(function ResponseBox({ {/* text output */} + + {/* tool call */} + {activeModule?.toolDetail && activeModule?.toolDetail.length > 0 && ( + } + /> + )} ); diff --git 
a/projects/app/src/components/ChatBox/components/ChatAvatar.tsx b/projects/app/src/components/ChatBox/components/ChatAvatar.tsx new file mode 100644 index 000000000..f67b65484 --- /dev/null +++ b/projects/app/src/components/ChatBox/components/ChatAvatar.tsx @@ -0,0 +1,23 @@ +import Avatar from '@/components/Avatar'; +import { Box } from '@chakra-ui/react'; +import { useTheme } from '@chakra-ui/system'; +import React from 'react'; + +const ChatAvatar = ({ src, type }: { src?: string; type: 'Human' | 'AI' }) => { + const theme = useTheme(); + return ( + + + + ); +}; + +export default React.memo(ChatAvatar); diff --git a/projects/app/src/components/ChatBox/components/ChatController.tsx b/projects/app/src/components/ChatBox/components/ChatController.tsx new file mode 100644 index 000000000..10d5ed831 --- /dev/null +++ b/projects/app/src/components/ChatBox/components/ChatController.tsx @@ -0,0 +1,236 @@ +import { useCopyData } from '@/web/common/hooks/useCopyData'; +import { useAudioPlay } from '@/web/common/utils/voice'; +import { Flex, FlexProps, Image, css, useTheme } from '@chakra-ui/react'; +import { ChatSiteItemType } from '@fastgpt/global/core/chat/type'; +import { AppTTSConfigType } from '@fastgpt/global/core/module/type'; +import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat'; +import MyTooltip from '@fastgpt/web/components/common/MyTooltip'; +import React from 'react'; +import { useTranslation } from 'next-i18next'; +import MyIcon from '@fastgpt/web/components/common/Icon'; +import { formatChatValue2InputType } from '../utils'; +import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants'; + +export type ChatControllerProps = { + isChatting: boolean; + chat: ChatSiteItemType; + setChatHistories?: React.Dispatch>; + showVoiceIcon?: boolean; + ttsConfig?: AppTTSConfigType; + onRetry?: () => void; + onDelete?: () => void; + onMark?: () => void; + onReadUserDislike?: () => void; + onCloseUserLike?: () => void; + onAddUserLike?: () => 
void; + onAddUserDislike?: () => void; +}; + +const ChatController = ({ + isChatting, + chat, + setChatHistories, + showVoiceIcon, + ttsConfig, + onReadUserDislike, + onCloseUserLike, + onMark, + onRetry, + onDelete, + onAddUserDislike, + onAddUserLike, + shareId, + outLinkUid, + teamId, + teamToken +}: OutLinkChatAuthProps & ChatControllerProps & FlexProps) => { + const theme = useTheme(); + const { t } = useTranslation(); + const { copyData } = useCopyData(); + const { audioLoading, audioPlaying, hasAudio, playAudio, cancelAudio } = useAudioPlay({ + ttsConfig, + shareId, + outLinkUid, + teamId, + teamToken + }); + const controlIconStyle = { + w: '14px', + cursor: 'pointer', + p: '5px', + bg: 'white', + borderRight: theme.borders.base + }; + const controlContainerStyle = { + className: 'control', + color: 'myGray.400', + display: 'flex' + }; + + return ( + *:last-child, & > *:last-child svg': { + borderRight: 'none', + borderRadius: 'md' + } + })} + > + + copyData(formatChatValue2InputType(chat.value).text || '')} + /> + + {!!onDelete && !isChatting && ( + <> + {onRetry && ( + + + + )} + + + + + )} + {showVoiceIcon && + hasAudio && + (audioLoading ? ( + + + + ) : audioPlaying ? ( + + + cancelAudio()} + /> + + + + ) : ( + + { + const response = await playAudio({ + buffer: chat.ttsBuffer, + chatItemId: chat.dataId, + text: formatChatValue2InputType(chat.value).text || '' + }); + + if (!setChatHistories || !response.buffer) return; + setChatHistories((state) => + state.map((item) => + item.dataId === chat.dataId + ? 
{ + ...item, + ttsBuffer: response.buffer + } + : item + ) + ); + }} + /> + + ))} + {!!onMark && ( + + + + )} + {chat.obj === ChatRoleEnum.AI && ( + <> + {!!onCloseUserLike && chat.userGoodFeedback && ( + + + + )} + {!!onReadUserDislike && chat.userBadFeedback && ( + + + + )} + {!!onAddUserLike && ( + + )} + {!!onAddUserDislike && ( + + )} + + )} + + ); +}; + +export default React.memo(ChatController); diff --git a/projects/app/src/components/ChatBox/components/ChatItem.tsx b/projects/app/src/components/ChatBox/components/ChatItem.tsx new file mode 100644 index 000000000..31f9792eb --- /dev/null +++ b/projects/app/src/components/ChatBox/components/ChatItem.tsx @@ -0,0 +1,236 @@ +import { + Box, + BoxProps, + Card, + Flex, + useTheme, + Accordion, + AccordionItem, + AccordionButton, + AccordionPanel, + AccordionIcon, + Button, + Image, + Grid +} from '@chakra-ui/react'; +import React, { useMemo } from 'react'; +import ChatController, { type ChatControllerProps } from './ChatController'; +import ChatAvatar from './ChatAvatar'; +import { MessageCardStyle } from '../constants'; +import { formatChatValue2InputType } from '../utils'; +import Markdown, { CodeClassName } from '@/components/Markdown'; +import styles from '../index.module.scss'; +import MyIcon from '@fastgpt/web/components/common/Icon'; +import { + ChatItemValueTypeEnum, + ChatRoleEnum, + ChatStatusEnum +} from '@fastgpt/global/core/chat/constants'; +import FilesBlock from './FilesBox'; + +const colorMap = { + [ChatStatusEnum.loading]: { + bg: 'myGray.100', + color: 'myGray.600' + }, + [ChatStatusEnum.running]: { + bg: 'green.50', + color: 'green.700' + }, + [ChatStatusEnum.finish]: { + bg: 'green.50', + color: 'green.700' + } +}; + +const ChatItem = ({ + type, + avatar, + statusBoxData, + children, + isLastChild, + questionGuides = [], + ...chatControllerProps +}: { + type: ChatRoleEnum.Human | ChatRoleEnum.AI; + avatar?: string; + statusBoxData?: { + status: `${ChatStatusEnum}`; + name: string; + }; + 
isLastChild?: boolean; + questionGuides?: string[]; + children?: React.ReactNode; +} & ChatControllerProps) => { + const theme = useTheme(); + const styleMap: BoxProps = + type === ChatRoleEnum.Human + ? { + order: 0, + borderRadius: '8px 0 8px 8px', + justifyContent: 'flex-end', + textAlign: 'right', + bg: 'primary.100' + } + : { + order: 1, + borderRadius: '0 8px 8px 8px', + justifyContent: 'flex-start', + textAlign: 'left', + bg: 'myGray.50' + }; + const { chat, isChatting } = chatControllerProps; + + const ContentCard = useMemo(() => { + if (type === 'Human') { + const { text, files = [] } = formatChatValue2InputType(chat.value); + + return ( + <> + {files.length > 0 && } + + + ); + } + /* AI */ + return ( + + {chat.value.map((value, i) => { + const key = `${chat.dataId}-ai-${i}`; + if (value.text) { + let source = value.text?.content || ''; + + if (isLastChild && !isChatting && questionGuides.length > 0) { + source = `${source} +\`\`\`${CodeClassName.questionGuide} +${JSON.stringify(questionGuides)}`; + } + + return ; + } + if (value.type === ChatItemValueTypeEnum.tool && value.tools) { + return ( + + {value.tools.map((tool) => { + const toolParams = (() => { + try { + return JSON.stringify(JSON.parse(tool.params), null, 2); + } catch (error) { + return tool.params; + } + })(); + const toolResponse = (() => { + try { + return JSON.stringify(JSON.parse(tool.response), null, 2); + } catch (error) { + return tool.response; + } + })(); + return ( + + + + + + {tool.toolName} + {isChatting && !tool.response && ( + + )} + + + + {toolParams && ( + + )} + {toolResponse && ( + + )} + + + + + ); + })} + + ); + } + })} + + ); + }, [chat.dataId, chat.value, isChatting, isLastChild, questionGuides, type]); + + const chatStatusMap = useMemo(() => { + if (!statusBoxData?.status) return; + return colorMap[statusBoxData.status]; + }, [statusBoxData?.status]); + + return ( + <> + {/* control icon */} + + {isChatting && type === ChatRoleEnum.AI && isLastChild ? 
null : ( + + + + )} + + + {!!chatStatusMap && statusBoxData && isLastChild && ( + + + + {statusBoxData.name} + + + )} + + {/* content */} + + + {ContentCard} + {children} + + + + ); +}; + +export default ChatItem; diff --git a/projects/app/src/components/ChatBox/components/Empty.tsx b/projects/app/src/components/ChatBox/components/Empty.tsx new file mode 100644 index 000000000..31b69d9f9 --- /dev/null +++ b/projects/app/src/components/ChatBox/components/Empty.tsx @@ -0,0 +1,23 @@ +import Markdown from '@/components/Markdown'; +import { useMarkdown } from '@/web/common/hooks/useMarkdown'; +import { Box, Card } from '@chakra-ui/react'; +import React from 'react'; + +const Empty = () => { + const { data: chatProblem } = useMarkdown({ url: '/chatProblem.md' }); + const { data: versionIntro } = useMarkdown({ url: '/versionIntro.md' }); + + return ( + + {/* version intro */} + + + + + + + + ); +}; + +export default React.memo(Empty); diff --git a/projects/app/src/components/ChatBox/components/FilesBox.tsx b/projects/app/src/components/ChatBox/components/FilesBox.tsx new file mode 100644 index 000000000..7123f7d62 --- /dev/null +++ b/projects/app/src/components/ChatBox/components/FilesBox.tsx @@ -0,0 +1,22 @@ +import { Box, Flex, Grid } from '@chakra-ui/react'; +import MdImage from '@/components/Markdown/img/Image'; +import { UserInputFileItemType } from '@/components/ChatBox/type'; + +const FilesBlock = ({ files }: { files: UserInputFileItemType[] }) => { + return ( + + {files.map(({ id, type, name, url }, i) => { + if (type === 'image') { + return ( + + + + ); + } + return null; + })} + + ); +}; + +export default FilesBlock; diff --git a/projects/app/src/components/ChatBox/components/VariableInput.tsx b/projects/app/src/components/ChatBox/components/VariableInput.tsx new file mode 100644 index 000000000..e00f32d44 --- /dev/null +++ b/projects/app/src/components/ChatBox/components/VariableInput.tsx @@ -0,0 +1,119 @@ +import { VariableItemType } from 
'@fastgpt/global/core/module/type'; +import React, { useState } from 'react'; +import { UseFormReturn } from 'react-hook-form'; +import { useTranslation } from 'next-i18next'; +import { Box, Button, Card, Input, Textarea } from '@chakra-ui/react'; +import ChatAvatar from './ChatAvatar'; +import { MessageCardStyle } from '../constants'; +import { VariableInputEnum } from '@fastgpt/global/core/module/constants'; +import MySelect from '@fastgpt/web/components/common/MySelect'; +import MyIcon from '@fastgpt/web/components/common/Icon'; +import { ChatBoxInputFormType } from '../type.d'; + +const VariableInput = ({ + appAvatar, + variableModules, + variableIsFinish, + chatForm, + onSubmitVariables +}: { + appAvatar?: string; + variableModules: VariableItemType[]; + variableIsFinish: boolean; + onSubmitVariables: (e: Record) => void; + chatForm: UseFormReturn; +}) => { + const { t } = useTranslation(); + const [refresh, setRefresh] = useState(false); + const { register, setValue, handleSubmit: handleSubmitChat, watch } = chatForm; + const variables = watch('variables'); + + return ( + + {/* avatar */} + + {/* message */} + + + {variableModules.map((item) => ( + + + {item.label} + {item.required && ( + + * + + )} + + {item.type === VariableInputEnum.input && ( + + )} + {item.type === VariableInputEnum.textarea && ( + + )} + {item.type === VariableInputEnum.select && ( + ({ + label: item.value, + value: item.value + }))} + {...register(`variables.${item.key}`, { + required: item.required + })} + value={variables[item.key]} + onchange={(e) => { + setValue(`variables.${item.key}`, e); + setRefresh((state) => !state); + }} + /> + )} + + ))} + {!variableIsFinish && ( + } + size={'sm'} + maxW={'100px'} + onClick={handleSubmitChat((data) => { + onSubmitVariables(data); + })} + > + {t('core.chat.Start Chat')} + + )} + + + + ); +}; + +export default React.memo(VariableInput); diff --git a/projects/app/src/components/ChatBox/components/WelcomeBox.tsx 
b/projects/app/src/components/ChatBox/components/WelcomeBox.tsx new file mode 100644 index 000000000..8fac62843 --- /dev/null +++ b/projects/app/src/components/ChatBox/components/WelcomeBox.tsx @@ -0,0 +1,28 @@ +import { Box, Card } from '@chakra-ui/react'; +import React from 'react'; +import { MessageCardStyle } from '../constants'; +import Markdown from '@/components/Markdown'; +import ChatAvatar from './ChatAvatar'; + +const WelcomeBox = ({ appAvatar, welcomeText }: { appAvatar?: string; welcomeText: string }) => { + return ( + + {/* avatar */} + + {/* message */} + + + + + + + ); +}; + +export default WelcomeBox; diff --git a/projects/app/src/components/ChatBox/constants.ts b/projects/app/src/components/ChatBox/constants.ts new file mode 100644 index 000000000..34ce6f695 --- /dev/null +++ b/projects/app/src/components/ChatBox/constants.ts @@ -0,0 +1,13 @@ +import { BoxProps } from '@chakra-ui/react'; + +export const textareaMinH = '22px'; + +export const MessageCardStyle: BoxProps = { + px: 4, + py: 3, + borderRadius: '0 8px 8px 8px', + boxShadow: 'none', + display: 'inline-block', + maxW: ['calc(100% - 25px)', 'calc(100% - 40px)'], + color: 'myGray.900' +}; diff --git a/projects/app/src/components/ChatBox/hooks/useChatBox.tsx b/projects/app/src/components/ChatBox/hooks/useChatBox.tsx new file mode 100644 index 000000000..31adb6a60 --- /dev/null +++ b/projects/app/src/components/ChatBox/hooks/useChatBox.tsx @@ -0,0 +1,78 @@ +import { ExportChatType } from '@/types/chat'; +import { ChatItemType } from '@fastgpt/global/core/chat/type'; +import { useCallback } from 'react'; +import { htmlTemplate } from '@/constants/common'; +import { fileDownload } from '@/web/common/file/utils'; + +export const useChatBox = () => { + const onExportChat = useCallback( + ({ type, history }: { type: ExportChatType; history: ChatItemType[] }) => { + const getHistoryHtml = () => { + const historyDom = document.getElementById('history'); + if (!historyDom) return; + const dom = 
Array.from(historyDom.children).map((child, i) => { + const avatar = ``; + + const chatContent = child.querySelector('.markdown'); + + if (!chatContent) { + return ''; + } + + const chatContentClone = chatContent.cloneNode(true) as HTMLDivElement; + + const codeHeader = chatContentClone.querySelectorAll('.code-header'); + codeHeader.forEach((childElement: any) => { + childElement.remove(); + }); + + return `
+ ${avatar} + ${chatContentClone.outerHTML} +
`; + }); + + const html = htmlTemplate.replace('{{CHAT_CONTENT}}', dom.join('\n')); + return html; + }; + + const map: Record void> = { + md: () => { + fileDownload({ + text: history.map((item) => item.value).join('\n\n'), + type: 'text/markdown', + filename: 'chat.md' + }); + }, + html: () => { + const html = getHistoryHtml(); + html && + fileDownload({ + text: html, + type: 'text/html', + filename: '聊天记录.html' + }); + }, + pdf: () => { + const html = getHistoryHtml(); + + html && + // @ts-ignore + html2pdf(html, { + margin: 0, + filename: `聊天记录.pdf` + }); + } + }; + + map[type](); + }, + [] + ); + + return { + onExportChat + }; +}; diff --git a/projects/app/src/components/ChatBox/index.tsx b/projects/app/src/components/ChatBox/index.tsx index c1afc6120..25e197c84 100644 --- a/projects/app/src/components/ChatBox/index.tsx +++ b/projects/app/src/components/ChatBox/index.tsx @@ -10,39 +10,25 @@ import React, { } from 'react'; import Script from 'next/script'; import { throttle } from 'lodash'; -import type { ExportChatType } from '@/types/chat.d'; -import type { ChatItemType, ChatSiteItemType } from '@fastgpt/global/core/chat/type.d'; +import type { + AIChatItemType, + AIChatItemValueItemType, + ChatSiteItemType, + UserChatItemValueItemType +} from '@fastgpt/global/core/chat/type.d'; import type { ChatHistoryItemResType } from '@fastgpt/global/core/chat/type.d'; import { useToast } from '@fastgpt/web/hooks/useToast'; -import { useAudioPlay } from '@/web/common/utils/voice'; import { getErrText } from '@fastgpt/global/common/error/utils'; -import { useCopyData } from '@/web/common/hooks/useCopyData'; -import { - Box, - Card, - Flex, - Input, - Button, - useTheme, - BoxProps, - FlexProps, - Image, - Textarea, - Checkbox -} from '@chakra-ui/react'; +import { Box, Flex, Checkbox } from '@chakra-ui/react'; import { EventNameEnum, eventBus } from '@/web/common/utils/eventbus'; -import { adaptChat2GptMessages } from '@fastgpt/global/core/chat/adapt'; -import { useMarkdown 
} from '@/web/common/hooks/useMarkdown'; +import { chats2GPTMessages } from '@fastgpt/global/core/chat/adapt'; import { ModuleItemType } from '@fastgpt/global/core/module/type.d'; import { VariableInputEnum } from '@fastgpt/global/core/module/constants'; -import { UseFormReturn, useForm } from 'react-hook-form'; -import type { ChatMessageItemType } from '@fastgpt/global/core/ai/type.d'; -import { fileDownload } from '@/web/common/file/utils'; -import { htmlTemplate } from '@/constants/common'; +import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/module/runtime/constants'; +import { useForm } from 'react-hook-form'; import { useRouter } from 'next/router'; import { useSystemStore } from '@/web/common/system/useSystemStore'; import { useTranslation } from 'next-i18next'; -import { customAlphabet } from 'nanoid'; import { closeCustomFeedback, updateChatAdminFeedback, @@ -50,47 +36,35 @@ import { } from '@/web/core/chat/api'; import type { AdminMarkType } from './SelectMarkCollection'; -import MyIcon from '@fastgpt/web/components/common/Icon'; -import Avatar from '@/components/Avatar'; -import Markdown, { CodeClassName } from '@/components/Markdown'; import MyTooltip from '../MyTooltip'; + +import { postQuestionGuide } from '@/web/core/ai/api'; +import { splitGuideModule } from '@fastgpt/global/core/module/utils'; +import type { + generatingMessageProps, + StartChatFnProps, + ComponentRef, + ChatBoxInputType, + ChatBoxInputFormType +} from './type.d'; +import MessageInput from './MessageInput'; +import ChatBoxDivider from '../core/chat/Divider'; +import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat'; +import { getNanoid } from '@fastgpt/global/common/string/tools'; +import { ChatItemValueTypeEnum, ChatRoleEnum } from '@fastgpt/global/core/chat/constants'; +import { formatChatValue2InputType } from './utils'; +import { textareaMinH } from './constants'; +import { SseResponseEventEnum } from 
'@fastgpt/global/core/module/runtime/constants'; +import ChatItem from './components/ChatItem'; + import dynamic from 'next/dynamic'; const ResponseTags = dynamic(() => import('./ResponseTags')); const FeedbackModal = dynamic(() => import('./FeedbackModal')); const ReadFeedbackModal = dynamic(() => import('./ReadFeedbackModal')); const SelectMarkCollection = dynamic(() => import('./SelectMarkCollection')); - -import styles from './index.module.scss'; -import { postQuestionGuide } from '@/web/core/ai/api'; -import { splitGuideModule } from '@fastgpt/global/core/module/utils'; -import type { AppTTSConfigType, VariableItemType } from '@fastgpt/global/core/module/type.d'; -import MessageInput from './MessageInput'; -import { ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants'; -import ChatBoxDivider from '../core/chat/Divider'; -import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat'; -import MySelect from '@fastgpt/web/components/common/MySelect'; - -const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 24); - -const textareaMinH = '22px'; - -type generatingMessageProps = { text?: string; name?: string; status?: 'running' | 'finish' }; - -export type StartChatFnProps = { - chatList: ChatSiteItemType[]; - messages: ChatMessageItemType[]; - controller: AbortController; - variables: Record; - generatingMessage: (e: generatingMessageProps) => void; -}; - -export type ComponentRef = { - getChatHistories: () => ChatSiteItemType[]; - resetVariables: (data?: Record) => void; - resetHistory: (history: ChatSiteItemType[]) => void; - scrollToBottom: (behavior?: 'smooth' | 'auto') => void; - sendPrompt: (question: string) => void; -}; +const Empty = dynamic(() => import('./components/Empty')); +const WelcomeBox = dynamic(() => import('./components/WelcomeBox')); +const VariableInput = dynamic(() => import('./components/VariableInput')); enum FeedbackTypeEnum { user = 'user', @@ -98,15 +72,6 @@ enum FeedbackTypeEnum { hidden = 
'hidden' } -const MessageCardStyle: BoxProps = { - px: 4, - py: 3, - borderRadius: '0 8px 8px 8px', - boxShadow: '0 0 8px rgba(0,0,0,0.15)', - display: 'inline-block', - maxW: ['calc(100% - 25px)', 'calc(100% - 40px)'] -}; - type Props = OutLinkChatAuthProps & { feedbackType?: `${FeedbackTypeEnum}`; showMarkIcon?: boolean; // admin mark dataset @@ -125,12 +90,20 @@ type Props = OutLinkChatAuthProps & { onUpdateVariable?: (e: Record) => void; onStartChat?: (e: StartChatFnProps) => Promise<{ responseText: string; - [ModuleOutputKeyEnum.responseData]: ChatHistoryItemResType[]; + [DispatchNodeResponseKeyEnum.nodeResponse]: ChatHistoryItemResType[]; isNewChat?: boolean; }>; - onDelMessage?: (e: { contentId?: string; index: number }) => void; + onDelMessage?: (e: { contentId: string }) => void; }; +/* + The input is divided into sections + 1. text + 2. img + 3. file + 4. .... +*/ + const ChatBox = ( { feedbackType = FeedbackTypeEnum.hidden, @@ -155,7 +128,6 @@ const ChatBox = ( ref: ForwardedRef ) => { const ChatBoxRef = useRef(null); - const theme = useTheme(); const router = useRouter(); const { t } = useTranslation(); const { toast } = useToast(); @@ -165,8 +137,7 @@ const ChatBox = ( const questionGuideController = useRef(new AbortController()); const isNewChatReplace = useRef(false); - const [refresh, setRefresh] = useState(false); - const [chatHistory, setChatHistory] = useState([]); + const [chatHistories, setChatHistories] = useState([]); const [feedbackId, setFeedbackId] = useState(); const [readFeedbackData, setReadFeedbackData] = useState<{ chatItemId: string; @@ -177,9 +148,9 @@ const ChatBox = ( const isChatting = useMemo( () => - chatHistory[chatHistory.length - 1] && - chatHistory[chatHistory.length - 1]?.status !== 'finish', - [chatHistory] + chatHistories[chatHistories.length - 1] && + chatHistories[chatHistories.length - 1]?.status !== 'finish', + [chatHistories] ); const { welcomeText, variableModules, questionGuide, ttsConfig } = useMemo( @@ -192,19 
+163,20 @@ const ChatBox = ( ); // compute variable input is finish. - const chatForm = useForm<{ - variables: Record; - }>({ + const chatForm = useForm({ defaultValues: { - variables: {} + input: '', + files: [], + variables: {}, + chatStarted: false } }); - const { setValue, watch, handleSubmit } = chatForm; + const { setValue, watch, handleSubmit, control } = chatForm; const variables = watch('variables'); + const chatStarted = watch('chatStarted'); - const [variableInputFinish, setVariableInputFinish] = useState(false); // clicked start chat button const variableIsFinish = useMemo(() => { - if (!filterVariableModules || filterVariableModules.length === 0 || chatHistory.length > 0) + if (!filterVariableModules || filterVariableModules.length === 0 || chatHistories.length > 0) return true; for (let i = 0; i < filterVariableModules.length; i++) { @@ -214,8 +186,8 @@ const ChatBox = ( } } - return variableInputFinish; - }, [chatHistory.length, variableInputFinish, filterVariableModules, variables]); + return chatStarted; + }, [filterVariableModules, chatHistories.length, chatStarted, variables]); // 滚动到底部 const scrollToBottom = (behavior: 'smooth' | 'auto' = 'smooth') => { @@ -240,24 +212,92 @@ const ChatBox = ( ); // eslint-disable-next-line react-hooks/exhaustive-deps const generatingMessage = useCallback( - ({ text = '', status, name }: generatingMessageProps) => { - setChatHistory((state) => + ({ event, text = '', status, name, tool }: generatingMessageProps) => { + setChatHistories((state) => state.map((item, index) => { if (index !== state.length - 1) return item; - return { - ...item, - ...(text - ? 
{ - value: item.value + text + if (item.obj !== ChatRoleEnum.AI) return item; + + const lastValue: AIChatItemValueItemType = JSON.parse( + JSON.stringify(item.value[item.value.length - 1]) + ); + + if (event === SseResponseEventEnum.flowNodeStatus && status) { + return { + ...item, + status, + moduleName: name + }; + } else if ( + (event === SseResponseEventEnum.answer || event === SseResponseEventEnum.fastAnswer) && + text + ) { + if (!lastValue || !lastValue.text) { + const newValue: AIChatItemValueItemType = { + type: ChatItemValueTypeEnum.text, + text: { + content: text } - : {}), - ...(status && name - ? { - status, - moduleName: name + }; + return { + ...item, + value: item.value.concat(newValue) + }; + } else { + lastValue.text.content += text; + return { + ...item, + value: item.value.slice(0, -1).concat(lastValue) + }; + } + } else if (event === SseResponseEventEnum.toolCall && tool) { + const val: AIChatItemValueItemType = { + type: ChatItemValueTypeEnum.tool, + tools: [tool] + }; + return { + ...item, + value: + lastValue && lastValue.text + ? item.value.slice(0, -1).concat(val) + : item.value.concat(val) + }; + } else if ( + event === SseResponseEventEnum.toolParams && + tool && + lastValue.type === ChatItemValueTypeEnum.tool && + lastValue?.tools + ) { + lastValue.tools = lastValue.tools.map((item) => { + if (item.id === tool.id) { + item.params += tool.params; + } + return item; + }); + return { + ...item, + value: item.value.slice(0, -1).concat(lastValue) + }; + } else if (event === SseResponseEventEnum.toolResponse && tool) { + // replace tool response + return { + ...item, + value: item.value.map((val) => { + if (val.type === ChatItemValueTypeEnum.tool && val.tools) { + const tools = val.tools.map((item) => + item.id === tool.id ? 
{ ...item, response: tool.response } : item + ); + return { + ...val, + tools + }; } - : {}) - }; + return val; + }) + }; + } + + return item; }) ); generatingScroll(); @@ -266,19 +306,22 @@ const ChatBox = ( ); // 重置输入内容 - const resetInputVal = useCallback((val: string) => { - if (!TextareaDom.current) return; + const resetInputVal = useCallback( + ({ text = '', files = [] }: ChatBoxInputType) => { + if (!TextareaDom.current) return; + setValue('files', files); + setValue('input', text); - setTimeout(() => { - /* 回到最小高度 */ - if (TextareaDom.current) { - TextareaDom.current.value = val; - TextareaDom.current.style.height = - val === '' ? textareaMinH : `${TextareaDom.current.scrollHeight}px`; - } - setRefresh((state) => !state); - }, 100); - }, []); + setTimeout(() => { + /* 回到最小高度 */ + if (TextareaDom.current) { + TextareaDom.current.style.height = + text === '' ? textareaMinH : `${TextareaDom.current.scrollHeight}px`; + } + }, 100); + }, + [setValue] + ); // create question guide const createQuestionGuide = useCallback( @@ -291,7 +334,7 @@ const ChatBox = ( const result = await postQuestionGuide( { - messages: adaptChat2GptMessages({ messages: history, reserveId: false }).slice(-6), + messages: chats2GPTMessages({ messages: history, reserveId: false }).slice(-6), shareId, outLinkUid, teamId, @@ -315,10 +358,10 @@ const ChatBox = ( */ const sendPrompt = useCallback( ({ - inputVal = '', - history = chatHistory - }: { - inputVal?: string; + text = '', + files = [], + history = chatHistories + }: ChatBoxInputType & { history?: ChatSiteItemType[]; }) => { handleSubmit(async ({ variables }) => { @@ -331,10 +374,9 @@ const ChatBox = ( return; } questionGuideController.current?.abort('stop'); - // get input value - const val = inputVal.trim(); + text = text.trim(); - if (!val) { + if (!text && files.length === 0) { toast({ title: '内容为空', status: 'warning' @@ -345,24 +387,50 @@ const ChatBox = ( const newChatList: ChatSiteItemType[] = [ ...history, { - dataId: nanoid(), - 
obj: 'Human', - value: val, + dataId: getNanoid(24), + obj: ChatRoleEnum.Human, + value: [ + ...files.map((file) => ({ + type: ChatItemValueTypeEnum.file, + file: { + type: file.type, + name: file.name, + url: file.url || '' + } + })), + ...(text + ? [ + { + type: ChatItemValueTypeEnum.text, + text: { + content: text + } + } + ] + : []) + ] as UserChatItemValueItemType[], status: 'finish' }, { - dataId: nanoid(), - obj: 'AI', - value: '', + dataId: getNanoid(24), + obj: ChatRoleEnum.AI, + value: [ + { + type: ChatItemValueTypeEnum.text, + text: { + content: '' + } + } + ], status: 'loading' } ]; // 插入内容 - setChatHistory(newChatList); + setChatHistories(newChatList); // 清空输入内容 - resetInputVal(''); + resetInputVal({}); setQuestionGuide([]); setTimeout(() => { scrollToBottom(); @@ -372,20 +440,13 @@ const ChatBox = ( const abortSignal = new AbortController(); chatController.current = abortSignal; - const messages = adaptChat2GptMessages({ messages: newChatList, reserveId: true }); - + const messages = chats2GPTMessages({ messages: newChatList, reserveId: true }); const { responseData, responseText, isNewChat = false } = await onStartChat({ - chatList: newChatList.map((item) => ({ - dataId: item.dataId, - obj: item.obj, - value: item.value, - status: item.status, - moduleName: item.moduleName - })), + chatList: newChatList, messages, controller: abortSignal, generatingMessage, @@ -395,7 +456,7 @@ const ChatBox = ( isNewChatReplace.current = isNewChat; // set finish status - setChatHistory((state) => + setChatHistories((state) => state.map((item, index) => { if (index !== state.length - 1) return item; return { @@ -411,7 +472,14 @@ const ChatBox = ( i === newChatList.length - 1 ? 
{ ...item, - value: responseText + value: [ + { + type: ChatItemValueTypeEnum.text, + text: { + content: responseText + } + } + ] } : item ) @@ -428,12 +496,12 @@ const ChatBox = ( }); if (!err?.responseText) { - resetInputVal(inputVal); - setChatHistory(newChatList.slice(0, newChatList.length - 2)); + resetInputVal({ text, files }); + setChatHistories(newChatList.slice(0, newChatList.length - 2)); } // set finish status - setChatHistory((state) => + setChatHistories((state) => state.map((item, index) => { if (index !== state.length - 1) return item; return { @@ -446,7 +514,7 @@ const ChatBox = ( })(); }, [ - chatHistory, + chatHistories, createQuestionGuide, generatingMessage, generatingScroll, @@ -462,72 +530,222 @@ const ChatBox = ( // retry input const retryInput = useCallback( - async (index: number) => { - if (!onDelMessage) return; - const delHistory = chatHistory.slice(index); + (dataId?: string) => { + if (!dataId || !onDelMessage) return; - setLoading(true); + return async () => { + setLoading(true); + const index = chatHistories.findIndex((item) => item.dataId === dataId); + const delHistory = chatHistories.slice(index); - try { - await Promise.all( - delHistory.map((item, i) => onDelMessage({ contentId: item.dataId, index: index + i })) - ); - setChatHistory((state) => (index === 0 ? [] : state.slice(0, index))); + try { + await Promise.all( + delHistory.map(async (item) => { + if (item.dataId) { + return onDelMessage({ contentId: item.dataId }); + } + }) + ); + setChatHistories((state) => (index === 0 ? 
[] : state.slice(0, index))); - sendPrompt({ - inputVal: delHistory[0].value, - history: chatHistory.slice(0, index) - }); - } catch (error) {} - setLoading(false); + sendPrompt({ + ...formatChatValue2InputType(delHistory[0].value), + history: chatHistories.slice(0, index) + }); + } catch (error) { + toast({ + status: 'warning', + title: getErrText(error, 'Retry failed') + }); + } + setLoading(false); + }; }, - [chatHistory, onDelMessage, sendPrompt, setLoading] + [chatHistories, onDelMessage, sendPrompt, setLoading, toast] ); // delete one message const delOneMessage = useCallback( - ({ dataId, index }: { dataId?: string; index: number }) => { - setChatHistory((state) => state.filter((chat) => chat.dataId !== dataId)); - onDelMessage?.({ - contentId: dataId, - index - }); + (dataId?: string) => { + if (!dataId || !onDelMessage) return; + return () => { + setChatHistories((state) => state.filter((chat) => chat.dataId !== dataId)); + onDelMessage({ + contentId: dataId + }); + }; }, [onDelMessage] ); + // admin mark + const onMark = useCallback( + (chat: ChatSiteItemType, q = '') => { + if (!showMarkIcon || chat.obj !== ChatRoleEnum.AI) return; - // output data - useImperativeHandle(ref, () => ({ - getChatHistories: () => chatHistory, - resetVariables(e) { - const defaultVal: Record = {}; - filterVariableModules?.forEach((item) => { - defaultVal[item.key] = ''; - }); + return () => { + if (!chat.dataId) return; - setValue('variables', e || defaultVal); + if (chat.adminFeedback) { + setAdminMarkData({ + chatItemId: chat.dataId, + datasetId: chat.adminFeedback.datasetId, + collectionId: chat.adminFeedback.collectionId, + dataId: chat.adminFeedback.dataId, + q: chat.adminFeedback.q || q || '', + a: chat.adminFeedback.a + }); + } else { + setAdminMarkData({ + chatItemId: chat.dataId, + q, + a: formatChatValue2InputType(chat.value).text + }); + } + }; }, - resetHistory(e) { - setVariableInputFinish(!!e.length); - setChatHistory(e); - }, - scrollToBottom, - sendPrompt: 
(question: string) => { - sendPrompt({ - inputVal: question - }); - } - })); + [showMarkIcon] + ); + const onAddUserLike = useCallback( + (chat: ChatSiteItemType) => { + if ( + feedbackType !== FeedbackTypeEnum.user || + chat.obj !== ChatRoleEnum.AI || + chat.userBadFeedback + ) + return; + return () => { + if (!chat.dataId || !chatId || !appId) return; + + const isGoodFeedback = !!chat.userGoodFeedback; + setChatHistories((state) => + state.map((chatItem) => + chatItem.dataId === chat.dataId + ? { + ...chatItem, + userGoodFeedback: isGoodFeedback ? undefined : 'yes' + } + : chatItem + ) + ); + try { + updateChatUserFeedback({ + appId, + chatId, + chatItemId: chat.dataId, + shareId, + outLinkUid, + userGoodFeedback: isGoodFeedback ? undefined : 'yes' + }); + } catch (error) {} + }; + }, + [appId, chatId, feedbackType, outLinkUid, shareId] + ); + const onCloseUserLike = useCallback( + (chat: ChatSiteItemType) => { + if (feedbackType !== FeedbackTypeEnum.admin) return; + return () => { + if (!chat.dataId || !chatId || !appId) return; + setChatHistories((state) => + state.map((chatItem) => + chatItem.dataId === chat.dataId + ? { ...chatItem, userGoodFeedback: undefined } + : chatItem + ) + ); + updateChatUserFeedback({ + appId, + chatId, + chatItemId: chat.dataId, + userGoodFeedback: undefined + }); + }; + }, + [appId, chatId, feedbackType] + ); + const onADdUserDislike = useCallback( + (chat: ChatSiteItemType) => { + if ( + feedbackType !== FeedbackTypeEnum.user || + chat.obj !== ChatRoleEnum.AI || + chat.userGoodFeedback + ) { + return; + } + if (chat.userBadFeedback) { + return () => { + if (!chat.dataId || !chatId || !appId) return; + setChatHistories((state) => + state.map((chatItem) => + chatItem.dataId === chat.dataId + ? 
{ ...chatItem, userBadFeedback: undefined } + : chatItem + ) + ); + try { + updateChatUserFeedback({ + appId, + chatId, + chatItemId: chat.dataId, + shareId, + outLinkUid + }); + } catch (error) {} + }; + } else { + return () => setFeedbackId(chat.dataId); + } + }, + [appId, chatId, feedbackType, outLinkUid, shareId] + ); + const onReadUserDislike = useCallback( + (chat: ChatSiteItemType) => { + if (feedbackType !== FeedbackTypeEnum.admin || chat.obj !== ChatRoleEnum.AI) return; + return () => { + if (!chat.dataId) return; + setReadFeedbackData({ + chatItemId: chat.dataId || '', + content: chat.userBadFeedback || '' + }); + }; + }, + [feedbackType] + ); + const onCloseCustomFeedback = useCallback( + (chat: ChatSiteItemType, i: number) => { + return (e: React.ChangeEvent) => { + if (e.target.checked && appId && chatId && chat.dataId) { + closeCustomFeedback({ + appId, + chatId, + chatItemId: chat.dataId, + index: i + }); + // update dom + setChatHistories((state) => + state.map((chatItem) => + chatItem.obj === ChatRoleEnum.AI && chatItem.dataId === chat.dataId + ? 
{ + ...chatItem, + customFeedbacks: chatItem.customFeedbacks?.filter((_, index) => index !== i) + } + : chatItem + ) + ); + } + }; + }, + [appId, chatId] + ); - /* style start */ const showEmpty = useMemo( () => feConfigs?.show_emptyChat && showEmptyIntro && - chatHistory.length === 0 && + chatHistories.length === 0 && !filterVariableModules?.length && !welcomeText, [ - chatHistory.length, + chatHistories.length, feConfigs?.show_emptyChat, showEmptyIntro, filterVariableModules?.length, @@ -535,21 +753,15 @@ const ChatBox = ( ] ); const statusBoxData = useMemo(() => { - const colorMap = { - loading: 'myGray.700', - running: '#67c13b', - finish: 'primary.500' - }; if (!isChatting) return; - const chatContent = chatHistory[chatHistory.length - 1]; + const chatContent = chatHistories[chatHistories.length - 1]; if (!chatContent) return; return { - bg: colorMap[chatContent.status] || colorMap.loading, + status: chatContent.status || 'loading', name: t(chatContent.moduleName || '') || t('common.Loading') }; - }, [chatHistory, isChatting, t]); - /* style end */ + }, [chatHistories, isChatting, t]); // page change and abort request useEffect(() => { @@ -568,7 +780,7 @@ const ChatBox = ( const windowMessage = ({ data }: MessageEvent<{ type: 'sendPrompt'; text: string }>) => { if (data?.type === 'sendPrompt' && data?.text) { sendPrompt({ - inputVal: data.text + text: data.text }); } }; @@ -577,12 +789,12 @@ const ChatBox = ( eventBus.on(EventNameEnum.sendQuestion, ({ text }: { text: string }) => { if (!text) return; sendPrompt({ - inputVal: text + text }); }); eventBus.on(EventNameEnum.editQuestion, ({ text }: { text: string }) => { if (!text) return; - resetInputVal(text); + resetInputVal({ text }); }); return () => { @@ -592,13 +804,28 @@ const ChatBox = ( }; }, [resetInputVal, sendPrompt]); - const onSubmitVariables = useCallback( - (data: Record) => { - setVariableInputFinish(true); - onUpdateVariable?.(data); + // output data + useImperativeHandle(ref, () => ({ + 
getChatHistories: () => chatHistories, + resetVariables(e) { + const defaultVal: Record = {}; + filterVariableModules?.forEach((item) => { + defaultVal[item.key] = ''; + }); + + setValue('variables', e || defaultVal); }, - [onUpdateVariable] - ); + resetHistory(e) { + setValue('chatStarted', e.length > 0); + setChatHistories(e); + }, + scrollToBottom, + sendPrompt: (question: string) => { + sendPrompt({ + text: question + }); + } + })); return ( @@ -607,7 +834,7 @@ const ChatBox = ( {showEmpty && } - {!!welcomeText && } + {!!welcomeText && } {/* variable input */} {!!filterVariableModules?.length && ( { + setValue('chatStarted', true); + onUpdateVariable?.(data); + }} /> )} {/* chat history */} - {chatHistory.map((item, index) => ( + {chatHistories.map((item, index) => ( {item.obj === 'Human' && ( - <> - {/* control icon */} - - { - delOneMessage({ dataId: item.dataId, index }); - } - : undefined - } - onRetry={() => retryInput(index)} - /> - - - {/* content */} - - - - - - + )} {item.obj === 'AI' && ( <> - - - {/* control icon */} - { - delOneMessage({ dataId: item.dataId, index }); - } - : undefined - } - onMark={ - showMarkIcon - ? () => { - if (!item.dataId) return; - if (item.adminFeedback) { - setAdminMarkData({ - chatItemId: item.dataId, - datasetId: item.adminFeedback.datasetId, - collectionId: item.adminFeedback.collectionId, - dataId: item.adminFeedback.dataId, - q: item.adminFeedback.q || chatHistory[index - 1]?.value || '', - a: item.adminFeedback.a - }); - } else { - setAdminMarkData({ - chatItemId: item.dataId, - q: chatHistory[index - 1]?.value || '', - a: item.value - }); - } - } - : undefined - } - onAddUserLike={ - feedbackType !== FeedbackTypeEnum.user || item.userBadFeedback - ? undefined - : () => { - if (!item.dataId || !chatId || !appId) return; - - const isGoodFeedback = !!item.userGoodFeedback; - setChatHistory((state) => - state.map((chatItem) => - chatItem.dataId === item.dataId - ? { - ...chatItem, - userGoodFeedback: isGoodFeedback ? 
undefined : 'yes' - } - : chatItem - ) - ); - try { - updateChatUserFeedback({ - appId, - chatId, - chatItemId: item.dataId, - shareId, - outLinkUid, - userGoodFeedback: isGoodFeedback ? undefined : 'yes' - }); - } catch (error) {} - } - } - onCloseUserLike={ - feedbackType === FeedbackTypeEnum.admin - ? () => { - if (!item.dataId || !chatId || !appId) return; - setChatHistory((state) => - state.map((chatItem) => - chatItem.dataId === item.dataId - ? { ...chatItem, userGoodFeedback: undefined } - : chatItem - ) - ); - updateChatUserFeedback({ - appId, - chatId, - chatItemId: item.dataId, - userGoodFeedback: undefined - }); - } - : undefined - } - onAddUserDislike={(() => { - if (feedbackType !== FeedbackTypeEnum.user || item.userGoodFeedback) { - return; - } - if (item.userBadFeedback) { - return () => { - if (!item.dataId || !chatId || !appId) return; - setChatHistory((state) => - state.map((chatItem) => - chatItem.dataId === item.dataId - ? { ...chatItem, userBadFeedback: undefined } - : chatItem - ) - ); - try { - updateChatUserFeedback({ - appId, - chatId, - chatItemId: item.dataId, - shareId, - outLinkUid - }); - } catch (error) {} - }; - } else { - return () => setFeedbackId(item.dataId); - } - })()} - onReadUserDislike={ - feedbackType === FeedbackTypeEnum.admin - ? 
() => { - if (!item.dataId) return; - setReadFeedbackData({ - chatItemId: item.dataId || '', - content: item.userBadFeedback || '' - }); - } - : undefined - } + + - {/* chatting status */} - {statusBoxData && index === chatHistory.length - 1 && ( - - - - {statusBoxData.name} - - - )} - - {/* content */} - - - { - const text = item.value as string; - // replace quote tag: [source1] 标识第一个来源,需要提取数字1,从而去数组里查找来源 - const quoteReg = /\[source:(.+)\]/g; - const replaceText = text.replace(quoteReg, `[QUOTE SIGN]($1)`); - - // question guide - if ( - index === chatHistory.length - 1 && - !isChatting && - questionGuides.length > 0 - ) { - return `${replaceText}\n\`\`\`${ - CodeClassName.questionGuide - }\n${JSON.stringify(questionGuides)}`; - } - return replaceText; - })()} - isChatting={index === chatHistory.length - 1 && isChatting} - /> - - - - {/* custom feedback */} - {item.customFeedbacks && item.customFeedbacks.length > 0 && ( - - - {item.customFeedbacks.map((text, i) => ( - - - { - if (e.target.checked && appId && chatId && item.dataId) { - closeCustomFeedback({ - appId, - chatId, - chatItemId: item.dataId, - index: i - }); - // update dom - setChatHistory((state) => - state.map((chatItem) => - chatItem.dataId === item.dataId - ? 
{ - ...chatItem, - customFeedbacks: chatItem.customFeedbacks?.filter( - (item, index) => index !== i - ) - } - : chatItem - ) - ); - } - console.log(e); - }} - > - {text} - - - - ))} - - )} - {/* admin mark content */} - {showMarkIcon && item.adminFeedback && ( - - - - {item.adminFeedback.q} - {item.adminFeedback.a} + {/* custom feedback */} + {item.customFeedbacks && item.customFeedbacks.length > 0 && ( + + + {item.customFeedbacks.map((text, i) => ( + + + + {text} + + + ))} + + )} + {/* admin mark content */} + {showMarkIcon && item.adminFeedback && ( + + + + {item.adminFeedback.q} + {item.adminFeedback.a} - )} - - + + )} + )} @@ -913,11 +938,7 @@ const ChatBox = ( {/* message input */} {onStartChat && variableIsFinish && active && ( { - sendPrompt({ - inputVal - }); - }} + onSendMessage={sendPrompt} onStop={() => chatController.current?.abort('stop')} isChatting={isChatting} TextareaDom={TextareaDom} @@ -927,6 +948,7 @@ const ChatBox = ( outLinkUid={outLinkUid} teamId={teamId} teamToken={teamToken} + chatForm={chatForm} /> )} {/* user feedback modal */} @@ -939,7 +961,7 @@ const ChatBox = ( outLinkUid={outLinkUid} onClose={() => setFeedbackId(undefined)} onSuccess={(content: string) => { - setChatHistory((state) => + setChatHistories((state) => state.map((item) => item.dataId === feedbackId ? { ...item, userBadFeedback: content } : item ) @@ -954,7 +976,7 @@ const ChatBox = ( content={readFeedbackData.content} onClose={() => setReadFeedbackData(undefined)} onCloseFeedback={() => { - setChatHistory((state) => + setChatHistories((state) => state.map((chatItem) => chatItem.dataId === readFeedbackData.chatItemId ? { ...chatItem, userBadFeedback: undefined } @@ -989,7 +1011,7 @@ const ChatBox = ( }); // update dom - setChatHistory((state) => + setChatHistories((state) => state.map((chatItem) => chatItem.dataId === adminMarkData.chatItemId ? 
{ @@ -1007,7 +1029,7 @@ const ChatBox = ( chatItemId: readFeedbackData.chatItemId, userBadFeedback: undefined }); - setChatHistory((state) => + setChatHistories((state) => state.map((chatItem) => chatItem.dataId === readFeedbackData.chatItemId ? { ...chatItem, userBadFeedback: undefined } @@ -1024,442 +1046,3 @@ const ChatBox = ( }; export default React.memo(forwardRef(ChatBox)); - -export const useChatBox = () => { - const onExportChat = useCallback( - ({ type, history }: { type: ExportChatType; history: ChatItemType[] }) => { - const getHistoryHtml = () => { - const historyDom = document.getElementById('history'); - if (!historyDom) return; - const dom = Array.from(historyDom.children).map((child, i) => { - const avatar = ``; - - const chatContent = child.querySelector('.markdown'); - - if (!chatContent) { - return ''; - } - - const chatContentClone = chatContent.cloneNode(true) as HTMLDivElement; - - const codeHeader = chatContentClone.querySelectorAll('.code-header'); - codeHeader.forEach((childElement: any) => { - childElement.remove(); - }); - - return `
- ${avatar} - ${chatContentClone.outerHTML} -
`; - }); - - const html = htmlTemplate.replace('{{CHAT_CONTENT}}', dom.join('\n')); - return html; - }; - - const map: Record void> = { - md: () => { - fileDownload({ - text: history.map((item) => item.value).join('\n\n'), - type: 'text/markdown', - filename: 'chat.md' - }); - }, - html: () => { - const html = getHistoryHtml(); - html && - fileDownload({ - text: html, - type: 'text/html', - filename: '聊天记录.html' - }); - }, - pdf: () => { - const html = getHistoryHtml(); - - html && - // @ts-ignore - html2pdf(html, { - margin: 0, - filename: `聊天记录.pdf` - }); - } - }; - - map[type](); - }, - [] - ); - - return { - onExportChat - }; -}; - -const WelcomeText = React.memo(function Welcome({ - appAvatar, - welcomeText -}: { - appAvatar?: string; - welcomeText: string; -}) { - return ( - - {/* avatar */} - - {/* message */} - - - - - - - ); -}); -const VariableInput = React.memo(function VariableInput({ - appAvatar, - variableModules, - variableIsFinish, - chatForm, - onSubmitVariables -}: { - appAvatar?: string; - variableModules: VariableItemType[]; - variableIsFinish: boolean; - onSubmitVariables: (e: Record) => void; - chatForm: UseFormReturn<{ - variables: Record; - }>; -}) { - const { t } = useTranslation(); - const [refresh, setRefresh] = useState(false); - const { register, setValue, handleSubmit: handleSubmitChat, watch } = chatForm; - const variables = watch('variables'); - - return ( - - {/* avatar */} - - {/* message */} - - - {variableModules.map((item) => ( - - - {item.label} - {item.required && ( - - * - - )} - - {item.type === VariableInputEnum.input && ( - - )} - {item.type === VariableInputEnum.textarea && ( -