Mirror of https://github.com/labring/FastGPT.git (synced 2025-07-23 13:03:50 +00:00)
File input (#2270)
* doc
* feat: file upload config
* perf: chat box file params
* feat: markdown show file
* feat: chat file store and clear
* perf: read file contentType
* feat: llm vision config
* feat: file url output
* perf: plugin error text
* perf: image load
* feat: ai chat document
* perf: file block ui
* feat: read file node
* feat: file read response field
* feat: simple mode support read files
* feat: tool call
* feat: read file histories
* perf: select file
* perf: select file config
* i18n
* i18n
* fix: ts; feat: tool response preview result
@@ -119,3 +119,10 @@ export const Prompt_QuotePromptList: PromptTemplateItem[] = [
问题:"""{{question}}"""`
  }
];

// Document quote prompt
export const Prompt_DocumentQuote = `将 <Quote></Quote> 中的内容作为你的知识:
<Quote>
{{quote}}
</Quote>
`;
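
Note (illustrative sketch, not part of the diff): Prompt_DocumentQuote is presumably consumed by filling the {{quote}} placeholder with the text read from the user's files before it reaches the model; the exact wiring lives elsewhere in this PR. A minimal sketch, assuming a plain string replace (the import path and helper name below are made up):

// Hypothetical usage of Prompt_DocumentQuote; only the template and its
// {{quote}} placeholder come from the hunk above.
import { Prompt_DocumentQuote } from './prompt';

export const buildDocumentQuotePrompt = (quote: string): string =>
  Prompt_DocumentQuote.replace('{{quote}}', quote);

// buildDocumentQuotePrompt(fileText) returns the instruction text with the
// file content wrapped in <Quote></Quote>.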
packages/global/core/ai/type.d.ts (vendored, 40 lines changed)
@@ -2,23 +2,46 @@ import openai from 'openai';
import type {
  ChatCompletionMessageToolCall,
  ChatCompletionChunk,
  ChatCompletionMessageParam,
  ChatCompletionMessageParam as SdkChatCompletionMessageParam,
  ChatCompletionToolMessageParam,
  ChatCompletionAssistantMessageParam
  ChatCompletionAssistantMessageParam,
  ChatCompletionContentPart as SdkChatCompletionContentPart,
  ChatCompletionUserMessageParam as SdkChatCompletionUserMessageParam
} from 'openai/resources';
import { ChatMessageTypeEnum } from './constants';

export * from 'openai/resources';

export type ChatCompletionMessageParam = ChatCompletionMessageParam & {
// Extension of ChatCompletionMessageParam, Add file url type
export type ChatCompletionContentPartFile = {
  type: 'file_url';
  name: string;
  url: string;
};
// Rewrite ChatCompletionContentPart, Add file type
export type ChatCompletionContentPart =
  | SdkChatCompletionContentPart
  | ChatCompletionContentPartFile;
type CustomChatCompletionUserMessageParam = {
  content: string | Array<ChatCompletionContentPart>;
  role: 'user';
  name?: string;
};

export type ChatCompletionMessageParam = (
  | Exclude<SdkChatCompletionMessageParam, SdkChatCompletionUserMessageParam>
  | CustomChatCompletionUserMessageParam
) & {
  dataId?: string;
};
export type SdkChatCompletionMessageParam = SdkChatCompletionMessageParam;

/* ToolChoice and functionCall extension */
export type ChatCompletionToolMessageParam = ChatCompletionToolMessageParam & { name: string };
export type ChatCompletionAssistantToolParam = {
  role: 'assistant';
  tool_calls: ChatCompletionMessageToolCall[];
};

export type ChatCompletionMessageToolCall = ChatCompletionMessageToolCall & {
  toolName?: string;
  toolAvatar?: string;
@@ -28,13 +51,16 @@ export type ChatCompletionMessageFunctionCall = ChatCompletionAssistantMessagePa
  toolName?: string;
  toolAvatar?: string;
};

// Stream response
export type StreamChatType = Stream<ChatCompletionChunk>;

export default openai;
export * from 'openai';

// Other
export type PromptTemplateItem = {
  title: string;
  desc: string;
  value: string;
};

export default openai;
export * from 'openai';
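
Note (illustrative sketch, not part of the diff): with the widened types above, a user message can now mix text, image and file parts in one content array. A minimal sketch, assuming the types are imported from this type.d.ts (the import path is an assumption):

import type {
  ChatCompletionContentPartFile,
  ChatCompletionMessageParam
} from '@fastgpt/global/core/ai/type'; // assumed path

// The new 'file_url' content part introduced by this commit.
const filePart: ChatCompletionContentPartFile = {
  type: 'file_url',
  name: 'handbook.pdf',
  url: 'https://example.com/handbook.pdf'
};

// CustomChatCompletionUserMessageParam accepts the widened content array,
// and the intersection keeps the optional dataId field.
const userMessage: ChatCompletionMessageParam = {
  dataId: 'example-data-id',
  role: 'user',
  content: [
    { type: 'text', text: 'Summarize the attached document.' },
    filePart
  ]
};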
@@ -1,4 +1,4 @@
import { AppTTSConfigType, AppWhisperConfigType } from './type';
import { AppTTSConfigType, AppFileSelectConfigType, AppWhisperConfigType } from './type';

export enum AppTypeEnum {
  folder = 'folder',
@@ -23,3 +23,9 @@ export const defaultChatInputGuideConfig = {
  textList: [],
  customUrl: ''
};

export const defaultAppSelectFileConfig: AppFileSelectConfigType = {
  canSelectFile: false,
  canSelectImg: false,
  maxFiles: 10
};
packages/global/core/app/type.d.ts (vendored, 10 lines changed)
@@ -1,7 +1,7 @@
import type { FlowNodeTemplateType, StoreNodeItemType } from '../workflow/type/node';
import { AppTypeEnum } from './constants';
import { PermissionTypeEnum } from '../../support/permission/constant';
import { VariableInputEnum } from '../workflow/constants';
import { NodeInputKeyEnum, VariableInputEnum } from '../workflow/constants';
import { SelectedDatasetType } from '../workflow/api';
import { DatasetSearchModeEnum } from '../dataset/constants';
import { TeamTagSchema as TeamTagsSchemaType } from '@fastgpt/global/support/user/team/type.d';
@@ -91,6 +91,7 @@ export type AppChatConfigType = {
  whisperConfig?: AppWhisperConfigType;
  scheduledTriggerConfig?: AppScheduledTriggerConfigType;
  chatInputGuide?: ChatInputGuideConfigType;
  fileSelectConfig?: AppFileSelectConfigType;
};
export type SettingAIDataType = {
  model: string;
@@ -98,6 +99,7 @@ export type SettingAIDataType = {
  maxToken: number;
  isResponseAnswerText?: boolean;
  maxHistories?: number;
  [NodeInputKeyEnum.aiChatVision]?: boolean; // Is open vision mode
};

// variable
@@ -134,3 +136,9 @@ export type AppScheduledTriggerConfigType = {
  timezone: string;
  defaultPrompt: string;
};
// File
export type AppFileSelectConfigType = {
  canSelectFile: boolean;
  canSelectImg: boolean;
  maxFiles: number;
};
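
Note (illustrative sketch, not part of the diff): an app's stored fileSelectConfig can be overlaid on the new defaultAppSelectFileConfig to get a complete config. The helper name and import paths below are assumptions; the shapes come from the two hunks above.

import type { AppFileSelectConfigType } from '@fastgpt/global/core/app/type'; // assumed path
import { defaultAppSelectFileConfig } from '@fastgpt/global/core/app/constants'; // assumed path

// Fill missing fields with the defaults added in this commit.
export const resolveFileSelectConfig = (
  stored?: Partial<AppFileSelectConfigType>
): AppFileSelectConfigType => ({
  ...defaultAppSelectFileConfig,
  ...stored
});

// resolveFileSelectConfig({ canSelectFile: true })
// -> { canSelectFile: true, canSelectImg: false, maxFiles: 10 }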
@@ -56,16 +56,21 @@ export const chats2GPTMessages = ({
            text: item.text?.content || ''
          };
        }
        if (
          item.type === ChatItemValueTypeEnum.file &&
          item.file?.type === ChatFileTypeEnum.image
        ) {
          return {
            type: 'image_url',
            image_url: {
        if (item.type === ChatItemValueTypeEnum.file) {
          if (item.file?.type === ChatFileTypeEnum.image) {
            return {
              type: 'image_url',
              image_url: {
                url: item.file?.url || ''
              }
            };
          } else if (item.file?.type === ChatFileTypeEnum.file) {
            return {
              type: 'file_url',
              name: item.file?.name || '',
              url: item.file?.url || ''
            }
          };
          };
        }
        }
      })
      .filter(Boolean) as ChatCompletionContentPart[];
@@ -175,6 +180,16 @@ export const GPTMessages2Chats = (
              url: item.image_url.url
            }
          });
        } else if (item.type === 'file_url') {
          value.push({
            // @ts-ignore
            type: ChatItemValueTypeEnum.file,
            file: {
              type: ChatFileTypeEnum.file,
              name: item.name,
              url: item.url
            }
          });
        }
      });
    }
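
Note (illustrative sketch, not part of the diff): the new GPTMessages2Chats branch above maps a 'file_url' content part back into a chat file value. A sketch of the two shapes involved; the literal values are made up, and the string literals in comments only stand in for the enum members used in the hunk.

// A content part as produced by chats2GPTMessages for a non-image file.
const gptFilePart = {
  type: 'file_url' as const,
  name: 'spec.docx',
  url: 'https://example.com/spec.docx'
};

// ...and the chat value item GPTMessages2Chats pushes back for it.
const chatFileValue = {
  type: 'file', // stands in for ChatItemValueTypeEnum.file
  file: {
    type: 'file', // stands in for ChatFileTypeEnum.file
    name: gptFilePart.name,
    url: gptFilePart.url
  }
};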
packages/global/core/chat/type.d.ts (vendored, 1 line changed)
@@ -117,6 +117,7 @@ export type ChatItemType = (UserChatItemType | SystemChatItemType | AIChatItemTy
  dataId?: string;
} & ResponseTagItemType;

// Frontend type
export type ChatSiteItemType = (UserChatItemType | SystemChatItemType | AIChatItemType) & {
  dataId: string;
  status: `${ChatStatusEnum}`;
@@ -2,6 +2,7 @@ import { DispatchNodeResponseType } from '../workflow/runtime/type';
import { FlowNodeTypeEnum } from '../workflow/node/constant';
import { ChatItemValueTypeEnum, ChatRoleEnum } from './constants';
import { ChatHistoryItemResType, ChatItemType, UserChatItemValueItemType } from './type.d';
import { sliceStrStartEnd } from '../../common/string/tools';

// Concat 2 -> 1, and sort by role
export const concatHistories = (histories1: ChatItemType[], histories2: ChatItemType[]) => {
@@ -25,6 +26,7 @@ export const getChatTitleFromChatMessage = (message?: ChatItemType, defaultValue
  return defaultValue;
};

// Keep the first n and last n characters
export const getHistoryPreview = (
  completeMessages: ChatItemType[]
): {
@@ -32,30 +34,44 @@ export const getHistoryPreview = (
  value: string;
}[] => {
  return completeMessages.map((item, i) => {
    if (item.obj === ChatRoleEnum.System || i >= completeMessages.length - 2) {
      return {
        obj: item.obj,
        value: item.value?.[0]?.text?.content || ''
      };
    }
    const n = item.obj === ChatRoleEnum.System || i >= completeMessages.length - 2 ? 80 : 40;

    const content = item.value
      .map((item) => {
        if (item.text?.content) {
          const content =
            item.text.content.length > 20
              ? `${item.text.content.slice(0, 20)}...`
              : item.text.content;
          return content;
        }
        return '';
      })
      .filter(Boolean)
      .join('\n');
    // Get message text content
    const rawText = (() => {
      if (item.obj === ChatRoleEnum.System) {
        return item.value?.map((item) => item.text?.content).join('') || '';
      } else if (item.obj === ChatRoleEnum.Human) {
        return (
          item.value
            ?.map((item) => {
              if (item?.text?.content) return item?.text?.content;
              if (item.file?.type === 'image') return 'Input an image';
              return '';
            })
            .filter(Boolean)
            .join('\n') || ''
        );
      } else if (item.obj === ChatRoleEnum.AI) {
        return (
          item.value
            ?.map((item) => {
              return (
                item.text?.content || item?.tools?.map((item) => item.toolName).join(',') || ''
              );
            })
            .join('') || ''
        );
      }
      return '';
    })();

    const startContent = rawText.slice(0, n);
    const endContent = rawText.length > 2 * n ? rawText.slice(-n) : '';
    const content = startContent + (rawText.length > n ? ` ...... ` : '') + endContent;

    return {
      obj: item.obj,
      value: content
      value: sliceStrStartEnd(content, 80, 80)
    };
  });
};
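
Note (illustrative sketch, not part of the diff): getHistoryPreview now delegates the final truncation to sliceStrStartEnd from common/string/tools. A minimal sketch of the "keep the first n and last n characters" behavior it relies on, inferred from the inline startContent/endContent logic above; the real helper's signature and separator may differ.

// Assumed behavior, not the actual implementation in common/string/tools.
const sliceStrStartEnd = (str: string, start: number, end: number): string => {
  if (str.length <= start + end) return str;
  return `${str.slice(0, start)} ...... ${str.slice(-end)}`;
};

// A 400-character history entry keeps its first and last 80 characters:
// sliceStrStartEnd('a'.repeat(400), 80, 80).length === 80 + 8 + 80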
@@ -75,6 +75,8 @@ export enum NodeInputKeyEnum {
  aiChatQuoteTemplate = 'quoteTemplate',
  aiChatQuotePrompt = 'quotePrompt',
  aiChatDatasetQuote = 'quoteQA',
  aiChatVision = 'aiChatVision',
  stringQuoteText = 'stringQuoteText',

  // dataset
  datasetSelectList = 'datasets',
@@ -118,7 +120,10 @@ export enum NodeInputKeyEnum {

  // code
  code = 'code',
  codeType = 'codeType' // js|py
  codeType = 'codeType', // js|py

  // read files
  fileUrlList = 'fileUrlList'
}

export enum NodeOutputKeyEnum {
@@ -133,6 +138,9 @@ export enum NodeOutputKeyEnum {
  addOutputParam = 'system_addOutputParam',
  rawResponse = 'system_rawResponse',

  // start
  userFiles = 'userFiles',

  // dataset
  datasetQuoteQA = 'quoteQA',

@@ -117,7 +117,8 @@ export enum FlowNodeTypeEnum {
  variableUpdate = 'variableUpdate',
  code = 'code',
  textEditor = 'textEditor',
  customFeedback = 'customFeedback'
  customFeedback = 'customFeedback',
  readFiles = 'readFiles'
}

// node IO value type
@@ -16,10 +16,12 @@ import { UserModelSchema } from '../../../support/user/type';
import { AppDetailType, AppSchema } from '../../app/type';
import { RuntimeNodeItemType } from '../runtime/type';
import { RuntimeEdgeItemType } from './edge';
import { ReadFileNodeResponse } from '../template/system/readFiles/type';

/* workflow props */
export type ChatDispatchProps = {
  res?: NextApiResponse;
  requestOrigin?: string;
  mode: 'test' | 'chat' | 'debug';
  teamId: string;
  tmbId: string;
@@ -30,6 +32,7 @@ export type ChatDispatchProps = {
  histories: ChatItemType[];
  variables: Record<string, any>; // global variable
  query: UserChatItemValueItemType[]; // trigger query
  chatConfig: AppSchema['chatConfig'];
  stream: boolean;
  detail: boolean; // response detail
  maxRunTimes: number;
@@ -146,6 +149,10 @@ export type DispatchNodeResponseType = {

  // plugin
  pluginOutput?: Record<string, any>;

  // read files
  readFilesResult?: string;
  readFiles?: ReadFileNodeResponse;
};

export type DispatchNodeResultType<T> = {
@@ -166,4 +173,6 @@ export type AIChatNodeProps = {
  [NodeInputKeyEnum.aiChatIsResponseText]: boolean;
  [NodeInputKeyEnum.aiChatQuoteTemplate]?: string;
  [NodeInputKeyEnum.aiChatQuotePrompt]?: string;
  [NodeInputKeyEnum.aiChatVision]?: boolean;
  [NodeInputKeyEnum.stringQuoteText]?: string;
};
@@ -25,6 +25,7 @@ import { VariableUpdateNode } from './system/variableUpdate';
import { CodeNode } from './system/sandbox';
import { TextEditorNode } from './system/textEditor';
import { CustomFeedbackNode } from './system/customFeedback';
import { ReadFilesNodes } from './system/readFiles';

const systemNodes: FlowNodeTemplateType[] = [
  AiChatModule,
@@ -36,6 +37,7 @@ const systemNodes: FlowNodeTemplateType[] = [
  StopToolNode,
  ClassifyQuestionModule,
  ContextExtractModule,
  ReadFilesNodes,
  HttpNode468,
  AiQueryExtension,
  LafModule,
@@ -3,6 +3,7 @@ import { FlowNodeInputTypeEnum } from '../node/constant';
import { WorkflowIOValueTypeEnum } from '../constants';
import { chatNodeSystemPromptTip } from './tip';
import { FlowNodeInputItemType } from '../type/io';
import { i18nT } from '../../../../web/i18n/utils';

export const Input_Template_History: FlowNodeInputItemType = {
  key: NodeInputKeyEnum.history,
@@ -64,3 +65,11 @@ export const Input_Template_Dataset_Quote: FlowNodeInputItemType = {
  description: '',
  valueType: WorkflowIOValueTypeEnum.datasetQuote
};
export const Input_Template_Text_Quote: FlowNodeInputItemType = {
  key: NodeInputKeyEnum.stringQuoteText,
  renderTypeList: [FlowNodeInputTypeEnum.reference, FlowNodeInputTypeEnum.textarea],
  label: i18nT('app:document_quote'),
  debugLabel: i18nT('app:document_quote'),
  description: i18nT('app:document_quote_tip'),
  valueType: WorkflowIOValueTypeEnum.string
};
@@ -15,10 +15,12 @@ import {
  Input_Template_Dataset_Quote,
  Input_Template_History,
  Input_Template_System_Prompt,
  Input_Template_UserChatInput
  Input_Template_UserChatInput,
  Input_Template_Text_Quote
} from '../input';
import { chatNodeSystemPromptTip } from '../tip';
import { getHandleConfig } from '../utils';
import { i18nT } from '../../../../../web/i18n/utils';

export const AiChatModule: FlowNodeTemplateType = {
  id: FlowNodeTypeEnum.chatNode,
@@ -27,8 +29,8 @@ export const AiChatModule: FlowNodeTemplateType = {
  sourceHandle: getHandleConfig(true, true, true, true),
  targetHandle: getHandleConfig(true, true, true, true),
  avatar: 'core/workflow/template/aiChat',
  name: 'AI 对话',
  intro: 'AI 大模型对话',
  name: i18nT('workflow:template.ai_chat'),
  intro: i18nT('workflow:template.ai_chat_intro'),
  showStatus: true,
  isTool: true,
  version: '481',
@@ -40,20 +42,14 @@ export const AiChatModule: FlowNodeTemplateType = {
      renderTypeList: [FlowNodeInputTypeEnum.hidden], // Set in the pop-up window
      label: '',
      value: 0,
      valueType: WorkflowIOValueTypeEnum.number,
      min: 0,
      max: 10,
      step: 1
      valueType: WorkflowIOValueTypeEnum.number
    },
    {
      key: NodeInputKeyEnum.aiChatMaxToken,
      renderTypeList: [FlowNodeInputTypeEnum.hidden], // Set in the pop-up window
      label: '',
      value: 2000,
      valueType: WorkflowIOValueTypeEnum.number,
      min: 100,
      max: 4000,
      step: 50
      valueType: WorkflowIOValueTypeEnum.number
    },
    {
      key: NodeInputKeyEnum.aiChatIsResponseText,
@@ -74,6 +70,13 @@ export const AiChatModule: FlowNodeTemplateType = {
      label: '',
      valueType: WorkflowIOValueTypeEnum.string
    },
    {
      key: NodeInputKeyEnum.aiChatVision,
      renderTypeList: [FlowNodeInputTypeEnum.hidden],
      label: '',
      valueType: WorkflowIOValueTypeEnum.boolean,
      value: true
    },
    // settings modal ---
    {
      ...Input_Template_System_Prompt,
@@ -82,8 +85,9 @@ export const AiChatModule: FlowNodeTemplateType = {
      placeholder: chatNodeSystemPromptTip
    },
    Input_Template_History,
    { ...Input_Template_UserChatInput, toolDescription: '用户问题' },
    Input_Template_Dataset_Quote
    Input_Template_Dataset_Quote,
    Input_Template_Text_Quote,
    { ...Input_Template_UserChatInput, toolDescription: '用户问题' }
  ],
  outputs: [
    {
@@ -13,9 +13,9 @@ import {
import { Input_Template_UserChatInput } from '../input';
import { DatasetSearchModeEnum } from '../../../dataset/constants';
import { getHandleConfig } from '../utils';
import { i18nT } from '../../../../../web/i18n/utils';

export const Dataset_SEARCH_DESC =
  '调用“语义检索”和“全文检索”能力,从“知识库”中查找可能与问题相关的参考内容';
export const Dataset_SEARCH_DESC = i18nT('workflow:template.dataset_search_intro');

export const DatasetSearchModule: FlowNodeTemplateType = {
  id: FlowNodeTypeEnum.datasetSearchNode,
@@ -24,7 +24,7 @@ export const DatasetSearchModule: FlowNodeTemplateType = {
  sourceHandle: getHandleConfig(true, true, true, true),
  targetHandle: getHandleConfig(true, true, true, true),
  avatar: 'core/workflow/template/datasetSearch',
  name: '知识库搜索',
  name: i18nT('workflow:template.dataset_search'),
  intro: Dataset_SEARCH_DESC,
  showStatus: true,
  isTool: true,
@@ -0,0 +1,48 @@
import { i18nT } from '../../../../../../web/i18n/utils';
import {
  FlowNodeTemplateTypeEnum,
  NodeInputKeyEnum,
  NodeOutputKeyEnum,
  WorkflowIOValueTypeEnum
} from '../../../constants';
import {
  FlowNodeInputTypeEnum,
  FlowNodeOutputTypeEnum,
  FlowNodeTypeEnum
} from '../../../node/constant';
import { FlowNodeTemplateType } from '../../../type/node';
import { getHandleConfig } from '../../utils';

export const ReadFilesNodes: FlowNodeTemplateType = {
  id: FlowNodeTypeEnum.readFiles,
  templateType: FlowNodeTemplateTypeEnum.tools,
  flowNodeType: FlowNodeTypeEnum.readFiles,
  sourceHandle: getHandleConfig(true, true, true, true),
  targetHandle: getHandleConfig(true, true, true, true),
  avatar: 'core/app/simpleMode/file',
  name: i18nT('app:workflow.read_files'),
  intro: i18nT('app:workflow.read_files_tip'),
  showStatus: true,
  version: '489',
  isTool: true,
  inputs: [
    {
      key: NodeInputKeyEnum.fileUrlList,
      renderTypeList: [FlowNodeInputTypeEnum.reference],
      valueType: WorkflowIOValueTypeEnum.arrayString,
      label: i18nT('app:workflow.file_url'),
      required: true,
      value: []
    }
  ],
  outputs: [
    {
      id: NodeOutputKeyEnum.text,
      key: NodeOutputKeyEnum.text,
      label: i18nT('app:workflow.read_files_result'),
      description: i18nT('app:workflow.read_files_result_desc'),
      valueType: WorkflowIOValueTypeEnum.string,
      type: FlowNodeOutputTypeEnum.static
    }
  ]
};
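
Note (illustrative sketch, not part of the diff): a rough picture of how a ReadFilesNodes instance might be wired in a stored workflow, with its fileUrlList input referencing the new userFiles output of the workflow-start node. The node ids, the [nodeId, outputKey] reference tuple and the 'text' literal are assumptions; only the key names come from the enums added in this commit.

// Hypothetical stored-node snippet; treat everything except the key names as illustrative.
const readFilesNode = {
  nodeId: 'node-read-files',
  flowNodeType: 'readFiles', // FlowNodeTypeEnum.readFiles
  inputs: [
    {
      key: 'fileUrlList', // NodeInputKeyEnum.fileUrlList
      value: ['node-workflow-start', 'userFiles'] // reference to NodeOutputKeyEnum.userFiles
    }
  ],
  outputs: [
    {
      key: 'text' // stands in for NodeOutputKeyEnum.text, the read-files result string
    }
  ]
};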
packages/global/core/workflow/template/system/readFiles/type.d.ts (new file, vendored, 4 lines)
@@ -0,0 +1,4 @@
export type ReadFileNodeResponse = {
  url: string;
  name: string;
}[];
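
Note (illustrative sketch, not part of the diff): this is the shape the read-files node reports back through the new readFiles field on DispatchNodeResponseType (see the runtime type hunk earlier); the values and the relative import path below are made up.

import type { ReadFileNodeResponse } from './type'; // assumed relative path

const readFiles: ReadFileNodeResponse = [
  { name: 'handbook.pdf', url: 'https://example.com/handbook.pdf' },
  { name: 'spec.docx', url: 'https://example.com/spec.docx' }
];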
@@ -2,6 +2,7 @@ import { FlowNodeTypeEnum } from '../../node/constant';
import { FlowNodeTemplateType } from '../../type/node.d';
import { FlowNodeTemplateTypeEnum } from '../../constants';
import { getHandleConfig } from '../utils';
import { i18nT } from '../../../../../web/i18n/utils';

export const SystemConfigNode: FlowNodeTemplateType = {
  id: FlowNodeTypeEnum.systemConfig,
@@ -10,8 +11,8 @@ export const SystemConfigNode: FlowNodeTemplateType = {
  sourceHandle: getHandleConfig(false, false, false, false),
  targetHandle: getHandleConfig(false, false, false, false),
  avatar: 'core/workflow/template/systemConfig',
  name: '系统配置',
  intro: '可以配置应用的系统参数。',
  name: i18nT('workflow:template.system_config'),
  intro: '',
  unique: true,
  forbidDelete: true,
  version: '481',
@@ -19,6 +19,7 @@ import {
import { chatNodeSystemPromptTip } from '../tip';
import { LLMModelTypeEnum } from '../../../ai/constants';
import { getHandleConfig } from '../utils';
import { i18nT } from '../../../../../web/i18n/utils';

export const ToolModule: FlowNodeTemplateType = {
  id: FlowNodeTypeEnum.tools,
@@ -27,8 +28,8 @@ export const ToolModule: FlowNodeTemplateType = {
  sourceHandle: getHandleConfig(true, true, false, true),
  targetHandle: getHandleConfig(true, true, false, true),
  avatar: 'core/workflow/template/toolCall',
  name: '工具调用',
  intro: '通过AI模型自动选择一个或多个功能块进行调用,也可以对插件进行调用。',
  name: i18nT('workflow:template.tool_call'),
  intro: i18nT('workflow:template.tool_call_intro'),
  showStatus: true,
  version: '481',
  inputs: [
@@ -41,21 +42,23 @@ export const ToolModule: FlowNodeTemplateType = {
      renderTypeList: [FlowNodeInputTypeEnum.hidden], // Set in the pop-up window
      label: '',
      value: 0,
      valueType: WorkflowIOValueTypeEnum.number,
      min: 0,
      max: 10,
      step: 1
      valueType: WorkflowIOValueTypeEnum.number
    },
    {
      key: NodeInputKeyEnum.aiChatMaxToken,
      renderTypeList: [FlowNodeInputTypeEnum.hidden], // Set in the pop-up window
      label: '',
      value: 2000,
      valueType: WorkflowIOValueTypeEnum.number,
      min: 100,
      max: 4000,
      step: 50
      valueType: WorkflowIOValueTypeEnum.number
    },
    {
      key: NodeInputKeyEnum.aiChatVision,
      renderTypeList: [FlowNodeInputTypeEnum.hidden],
      label: '',
      valueType: WorkflowIOValueTypeEnum.boolean,
      value: true
    },

    {
      ...Input_Template_System_Prompt,
      label: 'core.ai.Prompt',
@@ -7,6 +7,17 @@ import {
} from '../../constants';
import { getHandleConfig } from '../utils';
import { Input_Template_UserChatInput } from '../input';
import { i18nT } from '../../../../../web/i18n/utils';
import { FlowNodeOutputItemType } from '../../type/io';

export const userFilesInput: FlowNodeOutputItemType = {
  id: NodeOutputKeyEnum.userFiles,
  key: NodeOutputKeyEnum.userFiles,
  label: i18nT('app:workflow.user_file_input'),
  description: i18nT('app:workflow.user_file_input_desc'),
  type: FlowNodeOutputTypeEnum.static,
  valueType: WorkflowIOValueTypeEnum.arrayString
};

export const WorkflowStart: FlowNodeTemplateType = {
  id: FlowNodeTypeEnum.workflowStart,
@@ -15,7 +26,7 @@ export const WorkflowStart: FlowNodeTemplateType = {
  sourceHandle: getHandleConfig(false, true, false, false),
  targetHandle: getHandleConfig(false, false, false, false),
  avatar: 'core/workflow/template/workflowStart',
  name: '流程开始',
  name: i18nT('workflow:template.workflow_start'),
  intro: '',
  forbidDelete: true,
  unique: true,
@@ -25,7 +36,7 @@ export const WorkflowStart: FlowNodeTemplateType = {
    {
      id: NodeOutputKeyEnum.userChatInput,
      key: NodeOutputKeyEnum.userChatInput,
      label: 'core.module.input.label.user question',
      label: i18nT('common:core.module.input.label.user question'),
      type: FlowNodeOutputTypeEnum.static,
      valueType: WorkflowIOValueTypeEnum.string
    }
@@ -82,6 +82,8 @@ export const splitGuideModule = (guideModules?: StoreNodeItemType) => {
    chatInputGuide
  };
};

// Get app chat config: db > nodes
export const getAppChatConfig = ({
  chatConfig,
  systemConfigNode,