Mirror of https://github.com/labring/FastGPT.git (synced 2025-07-23 13:03:50 +00:00)
4.6.5 - Coreference Resolution (CFR) Module (#631)
packages/global/core/ai/model.d.ts (vendored): 2 lines changed
@@ -14,7 +14,7 @@ export type ChatModelItemType = LLMModelItemType & {
};

export type FunctionModelItemType = LLMModelItemType & {
  functionCall: boolean;
  toolChoice: boolean;
  functionPrompt: string;
};
@@ -1,63 +1,5 @@
import type {
  LLMModelItemType,
  ChatModelItemType,
  FunctionModelItemType,
  VectorModelItemType,
  AudioSpeechModelType,
  WhisperModelType,
  ReRankModelItemType
} from './model.d';
import type { LLMModelItemType, VectorModelItemType } from './model.d';

export const defaultChatModels: ChatModelItemType[] = [
  {
    model: 'gpt-3.5-turbo-1106',
    name: 'GPT35-1106',
    price: 0,
    maxContext: 16000,
    maxResponse: 4000,
    quoteMaxToken: 2000,
    maxTemperature: 1.2,
    censor: false,
    vision: false,
    defaultSystemChatPrompt: ''
  },
  {
    model: 'gpt-3.5-turbo-16k',
    name: 'GPT35-16k',
    maxContext: 16000,
    maxResponse: 16000,
    price: 0,
    quoteMaxToken: 8000,
    maxTemperature: 1.2,
    censor: false,
    vision: false,
    defaultSystemChatPrompt: ''
  },
  {
    model: 'gpt-4',
    name: 'GPT4-8k',
    maxContext: 8000,
    maxResponse: 8000,
    price: 0,
    quoteMaxToken: 4000,
    maxTemperature: 1.2,
    censor: false,
    vision: false,
    defaultSystemChatPrompt: ''
  },
  {
    model: 'gpt-4-vision-preview',
    name: 'GPT4-Vision',
    maxContext: 128000,
    maxResponse: 4000,
    price: 0,
    quoteMaxToken: 100000,
    maxTemperature: 1.2,
    censor: false,
    vision: true,
    defaultSystemChatPrompt: ''
  }
];
export const defaultQAModels: LLMModelItemType[] = [
  {
    model: 'gpt-3.5-turbo-16k',
@@ -67,46 +9,6 @@ export const defaultQAModels: LLMModelItemType[] = [
    price: 0
  }
];
export const defaultCQModels: FunctionModelItemType[] = [
  {
    model: 'gpt-3.5-turbo-1106',
    name: 'GPT35-1106',
    maxContext: 16000,
    maxResponse: 4000,
    price: 0,
    functionCall: true,
    functionPrompt: ''
  },
  {
    model: 'gpt-4',
    name: 'GPT4-8k',
    maxContext: 8000,
    maxResponse: 8000,
    price: 0,
    functionCall: true,
    functionPrompt: ''
  }
];
export const defaultExtractModels: FunctionModelItemType[] = [
  {
    model: 'gpt-3.5-turbo-1106',
    name: 'GPT35-1106',
    maxContext: 16000,
    maxResponse: 4000,
    price: 0,
    functionCall: true,
    functionPrompt: ''
  }
];
export const defaultQGModels: LLMModelItemType[] = [
  {
    model: 'gpt-3.5-turbo-1106',
    name: 'GPT35-1106',
    maxContext: 1600,
    maxResponse: 4000,
    price: 0
  }
];

export const defaultVectorModels: VectorModelItemType[] = [
  {
@@ -117,27 +19,3 @@ export const defaultVectorModels: VectorModelItemType[] = [
    maxToken: 3000
  }
];

export const defaultReRankModels: ReRankModelItemType[] = [];

export const defaultAudioSpeechModels: AudioSpeechModelType[] = [
  {
    model: 'tts-1',
    name: 'OpenAI TTS1',
    price: 0,
    voices: [
      { label: 'Alloy', value: 'Alloy', bufferId: 'openai-Alloy' },
      { label: 'Echo', value: 'Echo', bufferId: 'openai-Echo' },
      { label: 'Fable', value: 'Fable', bufferId: 'openai-Fable' },
      { label: 'Onyx', value: 'Onyx', bufferId: 'openai-Onyx' },
      { label: 'Nova', value: 'Nova', bufferId: 'openai-Nova' },
      { label: 'Shimmer', value: 'Shimmer', bufferId: 'openai-Shimmer' }
    ]
  }
];

export const defaultWhisperModel: WhisperModelType = {
  model: 'whisper-1',
  name: 'Whisper1',
  price: 0
};
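With the hard-coded chat, CQ, extract, QG, rerank, TTS, and whisper defaults removed, this file keeps only the QA and vector defaults, so the remaining model lists presumably have to come from deployment configuration instead. A minimal sketch of what such a configured entry could look like, reusing the ChatModelItemType shape from model.d.ts above; the chatModels variable and its values are illustrative, not part of this commit:

```ts
import type { ChatModelItemType } from './model.d';

// Illustrative deployment-side config entry standing in for the removed
// defaultChatModels constant; field values mirror one of the deleted entries.
const chatModels: ChatModelItemType[] = [
  {
    model: 'gpt-3.5-turbo-1106',
    name: 'GPT35-1106',
    price: 0,
    maxContext: 16000,
    maxResponse: 4000,
    quoteMaxToken: 2000,
    maxTemperature: 1.2,
    censor: false,
    vision: false,
    defaultSystemChatPrompt: ''
  }
];

export default chatModels;
```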
packages/global/core/app/type.d.ts (vendored): 6 lines changed
@@ -67,6 +67,9 @@ export type AppSimpleEditFormType = {
    searchMode: `${DatasetSearchModeEnum}`;
    searchEmptyText: string;
  };
  cfr: {
    background: string;
  };
  userGuide: {
    welcomeText: string;
    variables: {
@@ -111,6 +114,9 @@ export type AppSimpleEditConfigTemplateType = {
    searchMode: `${DatasetSearchModeEnum}`;
    searchEmptyText?: boolean;
  };
  cfr?: {
    background?: boolean;
  };
  userGuide?: {
    welcomeText?: boolean;
    variables?: boolean;
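The new cfr block follows the pattern of the other sections: the edit form carries the actual background text, while the config template only flags whether the field is exposed in the simple editor. A hedged illustration of both sides with made-up values:

```ts
import type { AppSimpleEditConfigTemplateType, AppSimpleEditFormType } from './type.d';

// Illustrative template fragment: expose the coreference-resolution background field.
const templateFragment: Partial<AppSimpleEditConfigTemplateType> = {
  cfr: { background: true }
};

// Illustrative form fragment: the background the cfr module uses when rewriting
// follow-up questions.
const formFragment: Pick<AppSimpleEditFormType, 'cfr'> = {
  cfr: { background: 'The conversation is about the FastGPT project.' }
};
```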
@@ -3,23 +3,23 @@ import { FlowNodeTypeEnum } from '../module/node/constant';
import { ModuleOutputKeyEnum, ModuleInputKeyEnum } from '../module/constants';
import type { FlowNodeInputItemType } from '../module/node/type.d';
import { getGuideModule, splitGuideModule } from '../module/utils';
import { defaultChatModels } from '../ai/model';
import { ModuleItemType } from '../module/type.d';
import { DatasetSearchModeEnum } from '../dataset/constant';

export const getDefaultAppForm = (templateId = 'fastgpt-universal'): AppSimpleEditFormType => {
  const defaultChatModel = defaultChatModels[0];

  return {
    templateId,
    aiSettings: {
      model: defaultChatModel?.model,
      model: 'gpt-3.5-turbo',
      systemPrompt: '',
      temperature: 0,
      isResponseAnswerText: true,
      quotePrompt: '',
      quoteTemplate: '',
      maxToken: defaultChatModel ? defaultChatModel.maxResponse / 2 : 4000
      maxToken: 4000
    },
    cfr: {
      background: ''
    },
    dataset: {
      datasets: [],
@@ -116,6 +116,11 @@ export const appModules2Form = ({
      questionGuide: questionGuide,
      tts: ttsConfig
    };
  } else if (module.flowType === FlowNodeTypeEnum.cfr) {
    const value = module.inputs.find((item) => item.key === ModuleInputKeyEnum.aiSystemPrompt);
    if (value) {
      defaultAppForm.cfr.background = value.value;
    }
  }
});
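appModules2Form above copies the cfr module's aiSystemPrompt input into the form; the opposite direction (writing the form back into the module inputs) is outside this excerpt. A hedged sketch of what that write path could look like; the writeCfrBackground helper is hypothetical and the import paths assume the same directory layout as the imports above:

```ts
import { FlowNodeTypeEnum } from '../module/node/constant';
import { ModuleInputKeyEnum } from '../module/constants';
import type { ModuleItemType } from '../module/type.d';

// Hypothetical helper (not part of this diff): write the form's cfr background
// back into the cfr module's aiSystemPrompt input, mirroring the read path in
// appModules2Form above.
export function writeCfrBackground(
  modules: ModuleItemType[],
  form: { cfr: { background: string } }
) {
  const cfrModule = modules.find((item) => item.flowType === FlowNodeTypeEnum.cfr);
  if (!cfrModule) return;

  const input = cfrModule.inputs.find((item) => item.key === ModuleInputKeyEnum.aiSystemPrompt);
  if (input) {
    input.value = form.cfr.background;
  }
}
```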
packages/global/core/chat/type.d.ts (vendored): 5 lines changed
@@ -93,6 +93,7 @@ export type moduleDispatchResType = {
  model?: string;
  query?: string;
  contextTotalLen?: number;
  textOutput?: string;

  // chat
  temperature?: number;
@@ -119,9 +120,7 @@ export type moduleDispatchResType = {

  // plugin output
  pluginOutput?: Record<string, any>;

  // text editor
  textOutput?: string;
  pluginDetail?: ChatHistoryItemResType[];

  // tf switch
  tfSwitchResult?: boolean;
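textOutput moves out of the text-editor-only section into the shared fields, so any module, including the new cfr module, can report the text it produced. A hedged sketch of the response a cfr run might fill in; the field choice is an assumption based on the type above, not taken from the dispatch code, and the import path is illustrative:

```ts
import type { moduleDispatchResType } from './type.d';

// Illustrative response from a coreference-resolution run: the rewritten
// question goes into textOutput, alongside the usual accounting fields.
const cfrResponse: moduleDispatchResType = {
  model: 'gpt-3.5-turbo-1106',
  query: 'How do I deploy it?',            // original user question
  textOutput: 'How do I deploy FastGPT?',  // question after coreference resolution
  contextTotalLen: 6                       // number of history messages considered
};
```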
@@ -38,7 +38,6 @@ export enum FlowNodeOutputTypeEnum {
}

export enum FlowNodeTypeEnum {
  empty = 'empty',
  userGuide = 'userGuide',
  questionInput = 'questionInput',
  historyNode = 'historyNode',
@@ -52,10 +51,10 @@ export enum FlowNodeTypeEnum {
  pluginModule = 'pluginModule',
  pluginInput = 'pluginInput',
  pluginOutput = 'pluginOutput',
  textEditor = 'textEditor',
  cfr = 'cfr',

  // abandon
  variable = 'variable'
}

export const EDGE_TYPE = 'smoothstep';
export const EDGE_TYPE = 'default';
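With cfr added to FlowNodeTypeEnum, the dispatch layer can key a handler off the new flow type. A minimal sketch of such a mapping; the handler names and the table shape are assumptions, the real dispatch wiring is not part of this diff:

```ts
import { FlowNodeTypeEnum } from '../module/node/constant';

// Hypothetical dispatch table keyed by flow type; only the shape matters here.
type ModuleHandler = (inputs: Record<string, any>) => Promise<Record<string, any>>;

declare const dispatchChatCompletion: ModuleHandler;
declare const dispatchCFR: ModuleHandler;

const callbackMap: Record<string, ModuleHandler> = {
  [FlowNodeTypeEnum.chatNode]: dispatchChatCompletion,
  [FlowNodeTypeEnum.cfr]: dispatchCFR
};

export default callbackMap;
```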
@@ -141,7 +141,7 @@ export const AiChatModule: FlowModuleTemplateType = {
    },
    {
      key: ModuleOutputKeyEnum.answerText,
      label: 'AI回复',
      label: 'AI回复内容',
      description: '将在 stream 回复完毕后触发',
      valueType: ModuleIOValueTypeEnum.string,
      type: FlowNodeOutputTypeEnum.source,
@@ -0,0 +1,61 @@
import {
  FlowNodeInputTypeEnum,
  FlowNodeOutputTypeEnum,
  FlowNodeTypeEnum
} from '../../node/constant';
import { FlowModuleTemplateType } from '../../type.d';
import {
  ModuleIOValueTypeEnum,
  ModuleInputKeyEnum,
  ModuleOutputKeyEnum,
  ModuleTemplateTypeEnum
} from '../../constants';
import {
  Input_Template_History,
  Input_Template_Switch,
  Input_Template_UserChatInput
} from '../input';

export const AiCFR: FlowModuleTemplateType = {
  id: FlowNodeTypeEnum.chatNode,
  templateType: ModuleTemplateTypeEnum.tools,
  flowType: FlowNodeTypeEnum.cfr,
  avatar: '/imgs/module/cfr.svg',
  name: 'core.module.template.cfr',
  intro: 'core.module.template.cfr intro',
  showStatus: true,
  inputs: [
    Input_Template_Switch,
    {
      key: ModuleInputKeyEnum.aiModel,
      type: FlowNodeInputTypeEnum.selectExtractModel,
      label: 'core.module.input.label.aiModel',
      required: true,
      valueType: ModuleIOValueTypeEnum.string,
      showTargetInApp: false,
      showTargetInPlugin: false
    },
    {
      key: ModuleInputKeyEnum.aiSystemPrompt,
      type: FlowNodeInputTypeEnum.textarea,
      label: 'core.module.input.label.cfr background',
      max: 300,
      valueType: ModuleIOValueTypeEnum.string,
      description: 'core.module.input.description.cfr background',
      placeholder: 'core.module.input.placeholder.cfr background',
      showTargetInApp: true,
      showTargetInPlugin: true
    },
    Input_Template_History,
    Input_Template_UserChatInput
  ],
  outputs: [
    {
      key: ModuleOutputKeyEnum.text,
      label: 'core.module.output.label.cfr result',
      valueType: ModuleIOValueTypeEnum.string,
      type: FlowNodeOutputTypeEnum.source,
      targets: []
    }
  ]
};
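The template above only declares the module's interface: a model selector, an optional background textarea, the chat history, and the user's question in; a single text output out. The dispatch implementation is not part of this excerpt, so the following is a hedged sketch of what a coreference-resolution handler could do with those inputs; the chatCompletion helper, prompt wording, and return shape are assumptions:

```ts
// Hedged sketch of a coreference-resolution handler for the module declared
// above; helper names, prompt wording and return keys are assumptions.
type ChatItem = { obj: 'Human' | 'AI'; value: string };

interface CfrProps {
  model: string;          // ModuleInputKeyEnum.aiModel
  systemPrompt?: string;  // ModuleInputKeyEnum.aiSystemPrompt, the "background"
  history: ChatItem[];    // Input_Template_History
  userChatInput: string;  // Input_Template_UserChatInput
}

// Assumed LLM client; any chat-completion call with this shape would do.
declare function chatCompletion(args: { model: string; prompt: string }): Promise<string>;

export async function dispatchCFR({ model, systemPrompt, history, userChatInput }: CfrProps) {
  const historyText = history.map((item) => `${item.obj}: ${item.value}`).join('\n');

  const prompt = [
    systemPrompt ? `Background: ${systemPrompt}` : '',
    `Chat history:\n${historyText}`,
    'Rewrite the last user question so it is self-contained (resolve "it", "that", "he", ...):',
    userChatInput
  ]
    .filter(Boolean)
    .join('\n\n');

  const text = await chatCompletion({ model, prompt });

  // The template's single output is ModuleOutputKeyEnum.text.
  return { text };
}
```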
@@ -1,14 +0,0 @@
import { ModuleTemplateTypeEnum } from '../../constants';
import { FlowNodeTypeEnum } from '../../node/constant';
import { FlowModuleTemplateType } from '../../type.d';

export const EmptyModule: FlowModuleTemplateType = {
  id: FlowNodeTypeEnum.empty,
  templateType: ModuleTemplateTypeEnum.other,
  flowType: FlowNodeTypeEnum.empty,
  avatar: '/imgs/module/cq.png',
  name: '该模块已被移除',
  intro: '',
  inputs: [],
  outputs: []
};
packages/global/core/module/type.d.ts (vendored): 45 lines changed
@@ -38,7 +38,7 @@ export type ModuleItemType = {
  outputs: FlowNodeOutputItemType[];
};

/* function type */
/* --------------- function type -------------------- */
// variable
export type VariableItemType = {
  id: string;
@@ -74,3 +74,46 @@ export type ContextExtractAgentItemType = {
  required: boolean;
  enum?: string;
};

/* -------------- running module -------------- */
export type RunningModuleItemType = {
  name: ModuleItemType['name'];
  moduleId: ModuleItemType['moduleId'];
  flowType: ModuleItemType['flowType'];
  showStatus?: ModuleItemType['showStatus'];
} & {
  inputs: {
    key: string;
    value?: any;
  }[];
  outputs: {
    key: string;
    answer?: boolean;
    response?: boolean;
    value?: any;
    targets: {
      moduleId: string;
      key: string;
    }[];
  }[];
};

export type ChatDispatchProps = {
  res: NextApiResponse;
  mode: 'test' | 'chat';
  teamId: string;
  tmbId: string;
  user: UserType;
  appId: string;
  chatId?: string;
  responseChatItemId?: string;
  histories: ChatItemType[];
  variables: Record<string, any>;
  stream: boolean;
  detail: boolean; // response detail
};

export type ModuleDispatchProps<T> = ChatDispatchProps & {
  outputs: RunningModuleItemType['outputs'];
  inputs: T;
};
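ModuleDispatchProps&lt;T&gt; is what a module handler receives at run time: the shared ChatDispatchProps (response object, user, histories, stream/detail flags) plus its own typed inputs and declared outputs. A hedged sketch of the cfr handler from earlier, re-expressed against this generic type; the CFRInputs keys follow the cfr template's inputs and the body is purely illustrative:

```ts
import type { ModuleDispatchProps } from './type.d';

// Typed inputs for a cfr run; key names follow the cfr template's inputs.
type CFRInputs = {
  model: string;
  systemPrompt?: string;
  history: { obj: string; value: string }[];
  userChatInput: string;
};

export async function dispatchCFR(props: ModuleDispatchProps<CFRInputs>) {
  const { inputs, histories } = props;

  // The shared ChatDispatchProps also carries res, teamId, user, stream,
  // detail, etc.; a real handler would call the selected model here.
  const lastQuestion = inputs.userChatInput;
  const contextTotalLen = histories.length;

  return { text: lastQuestion, contextTotalLen }; // placeholder: no rewriting here
}
```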
packages/global/core/plugin/type.d.ts (vendored): 9 lines changed
@@ -15,14 +15,19 @@ export type PluginItemSchema = {
};

/* plugin template */
export type PluginTemplateType = {
export type PluginTemplateType = PluginRuntimeType & {
  author?: string;
  id: string;
  source: `${PluginSourceEnum}`;
  templateType: FlowModuleTemplateType['templateType'];
  intro: string;
  modules: ModuleItemType[];
};

export type PluginRuntimeType = {
  teamId?: string;
  name: string;
  avatar: string;
  intro: string;
  showStatus?: boolean;
  modules: ModuleItemType[];
};
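PluginTemplateType now extends the new PluginRuntimeType, so a template already carries every field needed at run time and a runtime object can be obtained by simply narrowing it. A minimal sketch under that assumption; the toRuntime helper is illustrative, not part of the commit:

```ts
import type { PluginRuntimeType, PluginTemplateType } from './type.d';

// Because PluginTemplateType is PluginRuntimeType & {...}, a template can be
// used wherever a runtime plugin is expected, or narrowed explicitly:
export function toRuntime(template: PluginTemplateType): PluginRuntimeType {
  const { teamId, name, avatar, intro, showStatus, modules } = template;
  return { teamId, name, avatar, intro, showStatus, modules };
}
```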