Mirror of https://github.com/labring/FastGPT.git

Commit: v4.5 (#403)
@@ -5,7 +5,8 @@ export enum SystemInputEnum {
   'switch' = 'switch', // a trigger switch
   'history' = 'history',
   'userChatInput' = 'userChatInput',
-  'questionGuide' = 'questionGuide'
+  'questionGuide' = 'questionGuide',
+  isResponseAnswerText = 'isResponseAnswerText'
 }
 export enum SystemOutputEnum {
   finish = 'finish'

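The new `isResponseAnswerText` system input lets a chat module produce an answer without echoing it back to the end user. Below is a minimal sketch of how a module runner might honour such a flag; the `ChatModuleResult` shape and `buildResult` helper are illustrative assumptions, not FastGPT's actual dispatch code.

```ts
// Illustrative only: this result shape and helper are assumptions, not FastGPT source.
interface ChatModuleResult {
  answerText: string; // raw completion, still available to downstream modules
  responseText: string; // what actually gets streamed back to the chat window
}

function buildResult(answerText: string, inputs: Record<string, unknown>): ChatModuleResult {
  // isResponseAnswerText defaults to true; only an explicit false suppresses the reply.
  const respond = inputs['isResponseAnswerText'] !== false;
  return { answerText, responseText: respond ? answerText : '' };
}
```
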
@@ -9,7 +9,7 @@ import {
 } from './index';
 import type { AppItemType } from '@/types/app';
 import type { FlowModuleTemplateType } from '@/types/core/app/flow';
-import { chatModelList } from '@/web/common/store/static';
+import { chatModelList, cqModelList } from '@/web/common/store/static';
 import {
   Input_Template_History,
   Input_Template_TFSwitch,

@@ -136,14 +136,14 @@ export const ChatModule: FlowModuleTemplateType = {
       key: 'model',
       type: FlowInputItemTypeEnum.selectChatModel,
       label: '对话模型',
-      value: chatModelList[0]?.model,
-      list: chatModelList.map((item) => ({ label: item.name, value: item.model })),
+      value: chatModelList?.[0]?.model,
+      customData: () => chatModelList,
       required: true,
       valueCheck: (val) => !!val
     },
     {
       key: 'temperature',
-      type: FlowInputItemTypeEnum.slider,
+      type: FlowInputItemTypeEnum.hidden,
       label: '温度',
       value: 0,
       min: 0,

@@ -156,20 +156,26 @@ export const ChatModule: FlowModuleTemplateType = {
     },
     {
       key: 'maxToken',
-      type: FlowInputItemTypeEnum.maxToken,
+      type: FlowInputItemTypeEnum.hidden,
       label: '回复上限',
-      value: chatModelList[0] ? chatModelList[0].contextMaxToken / 2 : 2000,
+      value: chatModelList?.[0] ? chatModelList[0].maxToken / 2 : 2000,
       min: 100,
-      max: chatModelList[0]?.contextMaxToken || 4000,
+      max: chatModelList?.[0]?.maxToken || 4000,
       step: 50,
       markList: [
         { label: '100', value: 100 },
         {
-          label: `${chatModelList[0]?.contextMaxToken || 4000}`,
-          value: chatModelList[0]?.contextMaxToken || 4000
+          label: `${chatModelList?.[0]?.maxToken || 4000}`,
+          value: chatModelList?.[0]?.maxToken || 4000
         }
       ]
     },
+    {
+      key: 'aiSettings',
+      type: FlowInputItemTypeEnum.aiSettings,
+      label: '',
+      connected: false
+    },
     {
       key: 'systemPrompt',
       type: FlowInputItemTypeEnum.textarea,

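The switch from `contextMaxToken` to `maxToken` comes with defensive optional chaining, so the template still yields sane defaults when the model list has not been loaded. A small sketch of that fallback arithmetic; the `getMaxTokenDefaults` helper and `ChatModelLike` type are assumptions for illustration, not FastGPT code.

```ts
// Hypothetical helper mirroring the fallback arithmetic in the template above.
interface ChatModelLike {
  model: string;
  maxToken: number;
}

function getMaxTokenDefaults(chatModelList?: ChatModelLike[]) {
  const first = chatModelList?.[0];
  return {
    value: first ? first.maxToken / 2 : 2000, // default reply limit: half the context window
    max: first?.maxToken || 4000 // slider ceiling: full context window, else 4000
  };
}

// getMaxTokenDefaults([{ model: 'gpt-3.5-turbo', maxToken: 4000 }]) -> { value: 2000, max: 4000 }
// getMaxTokenDefaults(undefined)                                    -> { value: 2000, max: 4000 }
```
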
@@ -180,6 +186,13 @@ export const ChatModule: FlowModuleTemplateType = {
       placeholder: ChatModelSystemTip,
       value: ''
     },
+    {
+      key: SystemInputEnum.isResponseAnswerText,
+      type: FlowInputItemTypeEnum.hidden,
+      label: '返回AI内容',
+      valueType: FlowValueTypeEnum.boolean,
+      value: true
+    },
     {
       key: 'quoteTemplate',
       type: FlowInputItemTypeEnum.hidden,

@@ -196,7 +209,7 @@ export const ChatModule: FlowModuleTemplateType = {
     },
     {
       key: 'quoteQA',
-      type: FlowInputItemTypeEnum.quoteList,
+      type: FlowInputItemTypeEnum.target,
       label: '引用内容',
       description: "对象数组格式,结构:\n [{q:'问题',a:'回答'}]",
       valueType: FlowValueTypeEnum.kbQuote,

@@ -216,7 +229,7 @@ export const ChatModule: FlowModuleTemplateType = {
     },
     {
       key: TaskResponseKeyEnum.answerText,
-      label: '模型回复',
+      label: 'AI回复',
       description: '将在 stream 回复完毕后触发',
       valueType: FlowValueTypeEnum.string,
       type: FlowOutputItemTypeEnum.source,

@@ -330,12 +343,21 @@ export const ClassifyQuestionModule: FlowModuleTemplateType = {
   showStatus: true,
   inputs: [
     Input_Template_TFSwitch,
+    {
+      key: 'model',
+      type: FlowInputItemTypeEnum.selectChatModel,
+      label: '分类模型',
+      value: cqModelList?.[0]?.model,
+      customData: () => cqModelList,
+      required: true,
+      valueCheck: (val) => !!val
+    },
     {
       key: 'systemPrompt',
       type: FlowInputItemTypeEnum.textarea,
       valueType: FlowValueTypeEnum.string,
       value: '',
-      label: '系统提示词',
+      label: '背景知识',
       description:
         '你可以添加一些特定内容的介绍,从而更好的识别用户的问题类型。这个内容通常是给模型介绍一个它不知道的内容。',
       placeholder: '例如: \n1. Laf 是一个云函数开发平台……\n2. Sealos 是一个集群操作系统'

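Both the chat and classify-question modules now expose their model as a `selectChatModel` input with a `customData` provider and a `valueCheck` guard. A minimal sketch of how an editor might use those two hooks; the `FlowInputLike` type and `validateInput` helper are assumptions, not FastGPT's actual editor code.

```ts
// Hypothetical validation pass over a module's inputs; not FastGPT's editor code.
interface FlowInputLike<T = unknown> {
  key: string;
  value?: T;
  required?: boolean;
  customData?: () => unknown[]; // lazily supplies the selectable options
  valueCheck?: (val: T | undefined) => boolean;
}

function validateInput(input: FlowInputLike): boolean {
  if (input.required && input.valueCheck) {
    return input.valueCheck(input.value);
  }
  return true;
}

// validateInput({ key: 'model', required: true, valueCheck: (v) => !!v }) -> false (no model picked)
```
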
@@ -504,7 +526,7 @@ export const AppModule: FlowModuleTemplateType = {
     },
     {
       key: TaskResponseKeyEnum.answerText,
-      label: '模型回复',
+      label: 'AI回复',
       description: '将在应用完全结束后触发',
       valueType: FlowValueTypeEnum.string,
       type: FlowOutputItemTypeEnum.source,

@@ -757,7 +779,7 @@ export const appTemplates: (AppItemType & {
       outputs: [
        {
          key: 'answerText',
-         label: '模型回复',
+         label: 'AI回复',
          description: '直接响应,无需配置',
          type: 'hidden',
          targets: []

@@ -1094,7 +1116,7 @@ export const appTemplates: (AppItemType & {
       outputs: [
        {
          key: 'answerText',
-         label: '模型回复',
+         label: 'AI回复',
          description: '直接响应,无需配置',
          type: 'hidden',
          targets: []

@@ -1401,7 +1423,7 @@ export const appTemplates: (AppItemType & {
       outputs: [
        {
          key: 'answerText',
-         label: '模型回复',
+         label: 'AI回复',
          description: '将在 stream 回复完毕后触发',
          valueType: 'string',
          type: 'source',

@@ -1863,7 +1885,7 @@ export const appTemplates: (AppItemType & {
       outputs: [
        {
          key: 'answerText',
-         label: '模型回复',
+         label: 'AI回复',
          description: '将在 stream 回复完毕后触发',
          valueType: 'string',
          type: 'source',

@@ -13,7 +13,7 @@ export enum FlowInputItemTypeEnum {
   chatInput = 'chatInput',
   selectApp = 'selectApp',
   // chat special input
-  quoteList = 'quoteList',
+  aiSettings = 'aiSettings',
   maxToken = 'maxToken',
   selectChatModel = 'selectChatModel',
   // dataset special input

@@ -1,5 +1,98 @@
 import type { AppSchema } from '@/types/mongoSchema';
 import type { OutLinkEditType } from '@fastgpt/support/outLink/type.d';
+import type {
+  LLMModelItemType,
+  ChatModelItemType,
+  FunctionModelItemType,
+  VectorModelItemType
+} from '@/types/model';

+export const defaultChatModels: ChatModelItemType[] = [
+  {
+    model: 'gpt-3.5-turbo',
+    name: 'GPT35-4k',
+    price: 0,
+    maxToken: 4000,
+    quoteMaxToken: 2000,
+    maxTemperature: 1.2,
+    censor: false,
+    defaultSystemChatPrompt: ''
+  },
+  {
+    model: 'gpt-3.5-turbo-16k',
+    name: 'GPT35-16k',
+    maxToken: 16000,
+    price: 0,
+    quoteMaxToken: 8000,
+    maxTemperature: 1.2,
+    censor: false,
+    defaultSystemChatPrompt: ''
+  },
+  {
+    model: 'gpt-4',
+    name: 'GPT4-8k',
+    maxToken: 8000,
+    price: 0,
+    quoteMaxToken: 4000,
+    maxTemperature: 1.2,
+    censor: false,
+    defaultSystemChatPrompt: ''
+  }
+];
+export const defaultQAModels: LLMModelItemType[] = [
+  {
+    model: 'gpt-3.5-turbo-16k',
+    name: 'GPT35-16k',
+    maxToken: 16000,
+    price: 0
+  }
+];
+export const defaultCQModels: FunctionModelItemType[] = [
+  {
+    model: 'gpt-3.5-turbo-16k',
+    name: 'GPT35-16k',
+    maxToken: 16000,
+    price: 0,
+    functionCall: true,
+    functionPrompt: ''
+  },
+  {
+    model: 'gpt-4',
+    name: 'GPT4-8k',
+    maxToken: 8000,
+    price: 0,
+    functionCall: true,
+    functionPrompt: ''
+  }
+];
+export const defaultExtractModels: FunctionModelItemType[] = [
+  {
+    model: 'gpt-3.5-turbo-16k',
+    name: 'GPT35-16k',
+    maxToken: 16000,
+    price: 0,
+    functionCall: true,
+    functionPrompt: ''
+  }
+];
+export const defaultQGModels: LLMModelItemType[] = [
+  {
+    model: 'gpt-3.5-turbo',
+    name: 'GPT35-4K',
+    maxToken: 4000,
+    price: 0
+  }
+];

+export const defaultVectorModels: VectorModelItemType[] = [
+  {
+    model: 'text-embedding-ada-002',
+    name: 'Embedding-2',
+    price: 0,
+    defaultToken: 500,
+    maxToken: 3000
+  }
+];

 export const defaultApp: AppSchema = {
   _id: '',

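These `default*Models` arrays read like built-in fallbacks for deployments that ship no model configuration of their own. A hedged sketch of that pattern follows; the `./defaults` import path, `SystemModelConfig` shape, and `loadModelConfig` helper are assumptions for illustration, not FastGPT's actual config loader.

```ts
// Illustrative fallback pattern only; names and the import path are hypothetical.
import {
  defaultChatModels,
  defaultQAModels,
  defaultCQModels,
  defaultVectorModels
} from './defaults';

interface SystemModelConfig {
  chatModels?: typeof defaultChatModels;
  qaModels?: typeof defaultQAModels;
  cqModels?: typeof defaultCQModels;
  vectorModels?: typeof defaultVectorModels;
}

// Any list missing from the deployment's config file falls back to the defaults above.
export function loadModelConfig(fileConfig: SystemModelConfig = {}) {
  return {
    chatModels: fileConfig.chatModels ?? defaultChatModels,
    qaModels: fileConfig.qaModels ?? defaultQAModels,
    cqModels: fileConfig.cqModels ?? defaultCQModels,
    vectorModels: fileConfig.vectorModels ?? defaultVectorModels
  };
}
```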