Mirror of https://github.com/labring/FastGPT.git
4.8.21 feature (#3720)

* agent search demo
* edit form: force close image select
* feat: llm params and doubao1.5
* perf: model error tip
* fix: template register path
* package
packages/global/core/ai/model.d.ts (vendored, 5 changes)
@@ -26,11 +26,16 @@ type BaseModelItemType = {
export type LLMModelItemType = PriceType &
  BaseModelItemType & {
    type: ModelTypeEnum.llm;
    // Model params
    maxContext: number;
    maxResponse: number;
    quoteMaxToken: number;
    maxTemperature?: number;

    showTopP?: boolean;
    responseFormatList?: string[];
    showStopSign?: boolean;

    censor?: boolean;
    vision?: boolean;
    reasoning?: boolean;
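The three new optional flags tell the chat UI which sampling controls to expose for a given model, alongside the existing vision/reasoning switches. A minimal sketch of a model entry that opts into them (only showTopP, showStopSign and responseFormatList come from this diff; the format names and numeric values are illustrative assumptions):

// Sketch only: assumes LLMModelItemType is imported from the declaration above.
const exampleModel: Partial<LLMModelItemType> = {
  maxContext: 32000,
  maxResponse: 4000,
  quoteMaxToken: 20000,
  maxTemperature: 1,
  showTopP: true, // expose a top_p slider in the chat settings modal
  showStopSign: true, // expose a stop-sequence input
  responseFormatList: ['text', 'json_object'], // assumed format names
  vision: false,
  reasoning: false
};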
packages/global/core/app/type.d.ts (vendored, 20 changes)
@@ -74,13 +74,17 @@ export type AppDetailType = AppSchema & {
export type AppSimpleEditFormType = {
  // templateId: string;
  aiSettings: {
    model: string;
    systemPrompt?: string | undefined;
    temperature?: number;
    maxToken?: number;
    isResponseAnswerText: boolean;
    [NodeInputKeyEnum.aiModel]: string;
    [NodeInputKeyEnum.aiSystemPrompt]?: string | undefined;
    [NodeInputKeyEnum.aiChatTemperature]?: number;
    [NodeInputKeyEnum.aiChatMaxToken]?: number;
    [NodeInputKeyEnum.aiChatIsResponseText]: boolean;
    maxHistories: number;
    [NodeInputKeyEnum.aiChatReasoning]?: boolean;
    [NodeInputKeyEnum.aiChatReasoning]?: boolean; // Is open reasoning mode
    [NodeInputKeyEnum.aiChatTopP]?: number;
    [NodeInputKeyEnum.aiChatStopSign]?: string;
    [NodeInputKeyEnum.aiChatResponseFormat]?: string;
    [NodeInputKeyEnum.aiChatJsonSchema]?: string;
  };
  dataset: {
    datasets: SelectedDatasetType;
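With aiSettings keyed by NodeInputKeyEnum computed properties, a simple-mode form value uses exactly the same keys that the workflow node inputs are stored under. A rough sketch with placeholder values (assumes NodeInputKeyEnum is imported from the workflow constants shown further down):

const aiSettings: AppSimpleEditFormType['aiSettings'] = {
  [NodeInputKeyEnum.aiModel]: 'gpt-4o-mini', // placeholder model id
  [NodeInputKeyEnum.aiSystemPrompt]: 'You are a helpful assistant.',
  [NodeInputKeyEnum.aiChatTemperature]: 0.7,
  [NodeInputKeyEnum.aiChatMaxToken]: 2000,
  [NodeInputKeyEnum.aiChatIsResponseText]: true,
  maxHistories: 6,
  [NodeInputKeyEnum.aiChatReasoning]: true,
  [NodeInputKeyEnum.aiChatTopP]: 1,
  [NodeInputKeyEnum.aiChatStopSign]: '###'
};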
@@ -119,6 +123,10 @@ export type SettingAIDataType = {
  maxHistories?: number;
  [NodeInputKeyEnum.aiChatVision]?: boolean; // Is open vision mode
  [NodeInputKeyEnum.aiChatReasoning]?: boolean; // Is open reasoning mode
  [NodeInputKeyEnum.aiChatTopP]?: number;
  [NodeInputKeyEnum.aiChatStopSign]?: string;
  [NodeInputKeyEnum.aiChatResponseFormat]?: string;
  [NodeInputKeyEnum.aiChatJsonSchema]?: string;
};

// variable
@@ -142,6 +142,10 @@ export enum NodeInputKeyEnum {
  aiChatVision = 'aiChatVision',
  stringQuoteText = 'stringQuoteText',
  aiChatReasoning = 'aiChatReasoning',
  aiChatTopP = 'aiChatTopP',
  aiChatStopSign = 'aiChatStopSign',
  aiChatResponseFormat = 'aiChatResponseFormat',
  aiChatJsonSchema = 'aiChatJsonSchema',

  // dataset
  datasetSelectList = 'datasets',
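Each new enum member's string value matches its name, so the same constant works both as an object key in the form types above and as the key stored on a workflow node input. A hedged sketch of a lookup over a simplified node-input shape (the real node input type has more fields, as the template entries below show):

type StoredNodeInput = { key: string; value?: any };

// Find the stored value for a given input key; returns undefined if the input is absent.
const getInputValue = <T = any>(inputs: StoredNodeInput[], key: NodeInputKeyEnum): T | undefined =>
  inputs.find((input) => input.key === key)?.value;

// e.g. const topP = getInputValue<number>(node.inputs, NodeInputKeyEnum.aiChatTopP);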
@@ -221,6 +221,10 @@ export type AIChatNodeProps = {
  [NodeInputKeyEnum.aiChatIsResponseText]: boolean;
  [NodeInputKeyEnum.aiChatVision]?: boolean;
  [NodeInputKeyEnum.aiChatReasoning]?: boolean;
  [NodeInputKeyEnum.aiChatTopP]?: number;
  [NodeInputKeyEnum.aiChatStopSign]?: string;
  [NodeInputKeyEnum.aiChatResponseFormat]?: string;
  [NodeInputKeyEnum.aiChatJsonSchema]?: string;

  [NodeInputKeyEnum.aiChatQuoteRole]?: AiChatQuoteRoleType;
  [NodeInputKeyEnum.aiChatQuoteTemplate]?: string;
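AIChatNodeProps now carries the four extra options end to end. One plausible way a runtime could translate them into an OpenAI-style request body; the top_p / stop / response_format parameter names follow the OpenAI chat-completions convention and are an assumption, not something this diff shows:

// Illustrative mapping only; not taken from the FastGPT runtime.
const extraCompletionParams = (props: AIChatNodeProps) => {
  const format = props[NodeInputKeyEnum.aiChatResponseFormat];
  const schema = props[NodeInputKeyEnum.aiChatJsonSchema];

  return {
    top_p: props[NodeInputKeyEnum.aiChatTopP],
    stop: props[NodeInputKeyEnum.aiChatStopSign],
    response_format: format
      ? { type: format, ...(schema ? { json_schema: JSON.parse(schema) } : {}) }
      : undefined
  };
};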
@@ -63,14 +63,12 @@ export const AiChatModule: FlowNodeTemplateType = {
      key: NodeInputKeyEnum.aiChatTemperature,
      renderTypeList: [FlowNodeInputTypeEnum.hidden], // Set in the pop-up window
      label: '',
      value: undefined,
      valueType: WorkflowIOValueTypeEnum.number
    },
    {
      key: NodeInputKeyEnum.aiChatMaxToken,
      renderTypeList: [FlowNodeInputTypeEnum.hidden], // Set in the pop-up window
      label: '',
      value: undefined,
      valueType: WorkflowIOValueTypeEnum.number
    },
@@ -98,6 +96,30 @@ export const AiChatModule: FlowNodeTemplateType = {
      valueType: WorkflowIOValueTypeEnum.boolean,
      value: true
    },
    {
      key: NodeInputKeyEnum.aiChatTopP,
      renderTypeList: [FlowNodeInputTypeEnum.hidden],
      label: '',
      valueType: WorkflowIOValueTypeEnum.number
    },
    {
      key: NodeInputKeyEnum.aiChatStopSign,
      renderTypeList: [FlowNodeInputTypeEnum.hidden],
      label: '',
      valueType: WorkflowIOValueTypeEnum.string
    },
    {
      key: NodeInputKeyEnum.aiChatResponseFormat,
      renderTypeList: [FlowNodeInputTypeEnum.hidden],
      label: '',
      valueType: WorkflowIOValueTypeEnum.string
    },
    {
      key: NodeInputKeyEnum.aiChatJsonSchema,
      renderTypeList: [FlowNodeInputTypeEnum.hidden],
      label: '',
      valueType: WorkflowIOValueTypeEnum.string
    },
    // settings modal ---
    {
      ...Input_Template_System_Prompt,
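All four additions reuse the pattern of the existing hidden temperature/maxToken entries: a hidden input whose value is edited in the chat settings modal rather than rendered on the node. Purely as an illustration of that shared shape (this helper is not part of the commit):

// Illustrative helper: builds a hidden, modal-edited input entry.
const hiddenInput = (key: NodeInputKeyEnum, valueType: WorkflowIOValueTypeEnum) => ({
  key,
  renderTypeList: [FlowNodeInputTypeEnum.hidden],
  label: '',
  valueType
});

// hiddenInput(NodeInputKeyEnum.aiChatTopP, WorkflowIOValueTypeEnum.number) yields the first new entry above.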
@@ -108,7 +130,6 @@ export const AiChatModule: FlowNodeTemplateType = {
    Input_Template_History,
    Input_Template_Dataset_Quote,
    Input_Template_File_Link_Prompt,

    { ...Input_Template_UserChatInput, toolDescription: i18nT('workflow:user_question') }
  ],
  outputs: [
@@ -43,14 +43,12 @@ export const ToolModule: FlowNodeTemplateType = {
      key: NodeInputKeyEnum.aiChatTemperature,
      renderTypeList: [FlowNodeInputTypeEnum.hidden], // Set in the pop-up window
      label: '',
      value: undefined,
      valueType: WorkflowIOValueTypeEnum.number
    },
    {
      key: NodeInputKeyEnum.aiChatMaxToken,
      renderTypeList: [FlowNodeInputTypeEnum.hidden], // Set in the pop-up window
      label: '',
      value: undefined,
      valueType: WorkflowIOValueTypeEnum.number
    },
    {
@@ -60,6 +58,30 @@ export const ToolModule: FlowNodeTemplateType = {
      valueType: WorkflowIOValueTypeEnum.boolean,
      value: true
    },
    {
      key: NodeInputKeyEnum.aiChatTopP,
      renderTypeList: [FlowNodeInputTypeEnum.hidden],
      label: '',
      valueType: WorkflowIOValueTypeEnum.number
    },
    {
      key: NodeInputKeyEnum.aiChatStopSign,
      renderTypeList: [FlowNodeInputTypeEnum.hidden],
      label: '',
      valueType: WorkflowIOValueTypeEnum.string
    },
    {
      key: NodeInputKeyEnum.aiChatResponseFormat,
      renderTypeList: [FlowNodeInputTypeEnum.hidden],
      label: '',
      valueType: WorkflowIOValueTypeEnum.string
    },
    {
      key: NodeInputKeyEnum.aiChatJsonSchema,
      renderTypeList: [FlowNodeInputTypeEnum.hidden],
      label: '',
      valueType: WorkflowIOValueTypeEnum.string
    },

    {
      ...Input_Template_System_Prompt,