4.6.5 - Coreference Resolution Module (#631)

This commit is contained in:
Archer
2023-12-22 10:47:31 +08:00
committed by GitHub
parent 41115a96c0
commit cd682d4275
112 changed files with 4163 additions and 2700 deletions

View File

@@ -28,7 +28,7 @@ export const simpleMarkdownText = (rawText: string) => {
['####', '###', '##', '#', '```', '~~~'].forEach((item, i) => {
const reg = new RegExp(`\\n\\s*${item}`, 'g');
if (reg.test(rawText)) {
rawText = rawText.replace(new RegExp(`(\\n)\\s*(${item})`, 'g'), '$1$2');
rawText = rawText.replace(new RegExp(`(\\n)( *)(${item})`, 'g'), '$1$3');
}
});

View File

@@ -14,7 +14,7 @@ export type ChatModelItemType = LLMModelItemType & {
};
export type FunctionModelItemType = LLMModelItemType & {
functionCall: boolean;
toolChoice: boolean;
functionPrompt: string;
};

View File

@@ -1,63 +1,5 @@
import type {
LLMModelItemType,
ChatModelItemType,
FunctionModelItemType,
VectorModelItemType,
AudioSpeechModelType,
WhisperModelType,
ReRankModelItemType
} from './model.d';
import type { LLMModelItemType, VectorModelItemType } from './model.d';
export const defaultChatModels: ChatModelItemType[] = [
{
model: 'gpt-3.5-turbo-1106',
name: 'GPT35-1106',
price: 0,
maxContext: 16000,
maxResponse: 4000,
quoteMaxToken: 2000,
maxTemperature: 1.2,
censor: false,
vision: false,
defaultSystemChatPrompt: ''
},
{
model: 'gpt-3.5-turbo-16k',
name: 'GPT35-16k',
maxContext: 16000,
maxResponse: 16000,
price: 0,
quoteMaxToken: 8000,
maxTemperature: 1.2,
censor: false,
vision: false,
defaultSystemChatPrompt: ''
},
{
model: 'gpt-4',
name: 'GPT4-8k',
maxContext: 8000,
maxResponse: 8000,
price: 0,
quoteMaxToken: 4000,
maxTemperature: 1.2,
censor: false,
vision: false,
defaultSystemChatPrompt: ''
},
{
model: 'gpt-4-vision-preview',
name: 'GPT4-Vision',
maxContext: 128000,
maxResponse: 4000,
price: 0,
quoteMaxToken: 100000,
maxTemperature: 1.2,
censor: false,
vision: true,
defaultSystemChatPrompt: ''
}
];
export const defaultQAModels: LLMModelItemType[] = [
{
model: 'gpt-3.5-turbo-16k',
@@ -67,46 +9,6 @@ export const defaultQAModels: LLMModelItemType[] = [
price: 0
}
];
export const defaultCQModels: FunctionModelItemType[] = [
{
model: 'gpt-3.5-turbo-1106',
name: 'GPT35-1106',
maxContext: 16000,
maxResponse: 4000,
price: 0,
functionCall: true,
functionPrompt: ''
},
{
model: 'gpt-4',
name: 'GPT4-8k',
maxContext: 8000,
maxResponse: 8000,
price: 0,
functionCall: true,
functionPrompt: ''
}
];
export const defaultExtractModels: FunctionModelItemType[] = [
{
model: 'gpt-3.5-turbo-1106',
name: 'GPT35-1106',
maxContext: 16000,
maxResponse: 4000,
price: 0,
functionCall: true,
functionPrompt: ''
}
];
export const defaultQGModels: LLMModelItemType[] = [
{
model: 'gpt-3.5-turbo-1106',
name: 'GPT35-1106',
maxContext: 1600,
maxResponse: 4000,
price: 0
}
];
export const defaultVectorModels: VectorModelItemType[] = [
{
@@ -117,27 +19,3 @@ export const defaultVectorModels: VectorModelItemType[] = [
maxToken: 3000
}
];
export const defaultReRankModels: ReRankModelItemType[] = [];
export const defaultAudioSpeechModels: AudioSpeechModelType[] = [
{
model: 'tts-1',
name: 'OpenAI TTS1',
price: 0,
voices: [
{ label: 'Alloy', value: 'Alloy', bufferId: 'openai-Alloy' },
{ label: 'Echo', value: 'Echo', bufferId: 'openai-Echo' },
{ label: 'Fable', value: 'Fable', bufferId: 'openai-Fable' },
{ label: 'Onyx', value: 'Onyx', bufferId: 'openai-Onyx' },
{ label: 'Nova', value: 'Nova', bufferId: 'openai-Nova' },
{ label: 'Shimmer', value: 'Shimmer', bufferId: 'openai-Shimmer' }
]
}
];
export const defaultWhisperModel: WhisperModelType = {
model: 'whisper-1',
name: 'Whisper1',
price: 0
};

View File

@@ -67,6 +67,9 @@ export type AppSimpleEditFormType = {
searchMode: `${DatasetSearchModeEnum}`;
searchEmptyText: string;
};
cfr: {
background: string;
};
userGuide: {
welcomeText: string;
variables: {
@@ -111,6 +114,9 @@ export type AppSimpleEditConfigTemplateType = {
searchMode: `${DatasetSearchModeEnum}`;
searchEmptyText?: boolean;
};
cfr?: {
background?: boolean;
};
userGuide?: {
welcomeText?: boolean;
variables?: boolean;

View File

@@ -3,23 +3,23 @@ import { FlowNodeTypeEnum } from '../module/node/constant';
import { ModuleOutputKeyEnum, ModuleInputKeyEnum } from '../module/constants';
import type { FlowNodeInputItemType } from '../module/node/type.d';
import { getGuideModule, splitGuideModule } from '../module/utils';
import { defaultChatModels } from '../ai/model';
import { ModuleItemType } from '../module/type.d';
import { DatasetSearchModeEnum } from '../dataset/constant';
export const getDefaultAppForm = (templateId = 'fastgpt-universal'): AppSimpleEditFormType => {
const defaultChatModel = defaultChatModels[0];
return {
templateId,
aiSettings: {
model: defaultChatModel?.model,
model: 'gpt-3.5-turbo',
systemPrompt: '',
temperature: 0,
isResponseAnswerText: true,
quotePrompt: '',
quoteTemplate: '',
maxToken: defaultChatModel ? defaultChatModel.maxResponse / 2 : 4000
maxToken: 4000
},
cfr: {
background: ''
},
dataset: {
datasets: [],
@@ -116,6 +116,11 @@ export const appModules2Form = ({
questionGuide: questionGuide,
tts: ttsConfig
};
} else if (module.flowType === FlowNodeTypeEnum.cfr) {
const value = module.inputs.find((item) => item.key === ModuleInputKeyEnum.aiSystemPrompt);
if (value) {
defaultAppForm.cfr.background = value.value;
}
}
});

View File

@@ -93,6 +93,7 @@ export type moduleDispatchResType = {
model?: string;
query?: string;
contextTotalLen?: number;
textOutput?: string;
// chat
temperature?: number;
@@ -119,9 +120,7 @@ export type moduleDispatchResType = {
// plugin output
pluginOutput?: Record<string, any>;
// text editor
textOutput?: string;
pluginDetail?: ChatHistoryItemResType[];
// tf switch
tfSwitchResult?: boolean;

View File

@@ -38,7 +38,6 @@ export enum FlowNodeOutputTypeEnum {
}
export enum FlowNodeTypeEnum {
empty = 'empty',
userGuide = 'userGuide',
questionInput = 'questionInput',
historyNode = 'historyNode',
@@ -52,10 +51,10 @@ export enum FlowNodeTypeEnum {
pluginModule = 'pluginModule',
pluginInput = 'pluginInput',
pluginOutput = 'pluginOutput',
textEditor = 'textEditor',
cfr = 'cfr',
// abandon
variable = 'variable'
}
export const EDGE_TYPE = 'smoothstep';
export const EDGE_TYPE = 'default';

View File

@@ -141,7 +141,7 @@ export const AiChatModule: FlowModuleTemplateType = {
},
{
key: ModuleOutputKeyEnum.answerText,
label: 'AI回复',
label: 'AI回复内容',
description: '将在 stream 回复完毕后触发',
valueType: ModuleIOValueTypeEnum.string,
type: FlowNodeOutputTypeEnum.source,

View File

@@ -0,0 +1,61 @@
import {
FlowNodeInputTypeEnum,
FlowNodeOutputTypeEnum,
FlowNodeTypeEnum
} from '../../node/constant';
import { FlowModuleTemplateType } from '../../type.d';
import {
ModuleIOValueTypeEnum,
ModuleInputKeyEnum,
ModuleOutputKeyEnum,
ModuleTemplateTypeEnum
} from '../../constants';
import {
Input_Template_History,
Input_Template_Switch,
Input_Template_UserChatInput
} from '../input';
/**
 * Flow module template for the CFR (coreference resolution) node.
 * Rewrites the user's question against the chat history (plus an optional
 * background prompt) so it can be retrieved/answered without implicit references.
 */
export const AiCFR: FlowModuleTemplateType = {
  // Fix: was FlowNodeTypeEnum.chatNode (copy-paste slip) — every template's id
  // matches its own flowType (cf. EmptyModule using FlowNodeTypeEnum.empty).
  id: FlowNodeTypeEnum.cfr,
  templateType: ModuleTemplateTypeEnum.tools,
  flowType: FlowNodeTypeEnum.cfr,
  avatar: '/imgs/module/cfr.svg',
  name: 'core.module.template.cfr',
  intro: 'core.module.template.cfr intro',
  showStatus: true,
  inputs: [
    Input_Template_Switch,
    {
      // Model used to perform the rewrite; reuses the "extract model" selector UI.
      key: ModuleInputKeyEnum.aiModel,
      type: FlowNodeInputTypeEnum.selectExtractModel,
      label: 'core.module.input.label.aiModel',
      required: true,
      valueType: ModuleIOValueTypeEnum.string,
      showTargetInApp: false,
      showTargetInPlugin: false
    },
    {
      // Optional background/context text injected as the system prompt (max 300 chars).
      key: ModuleInputKeyEnum.aiSystemPrompt,
      type: FlowNodeInputTypeEnum.textarea,
      label: 'core.module.input.label.cfr background',
      max: 300,
      valueType: ModuleIOValueTypeEnum.string,
      description: 'core.module.input.description.cfr background',
      placeholder: 'core.module.input.placeholder.cfr background',
      showTargetInApp: true,
      showTargetInPlugin: true
    },
    Input_Template_History,
    Input_Template_UserChatInput
  ],
  outputs: [
    {
      // The rewritten (coreference-resolved) question.
      key: ModuleOutputKeyEnum.text,
      label: 'core.module.output.label.cfr result',
      valueType: ModuleIOValueTypeEnum.string,
      type: FlowNodeOutputTypeEnum.source,
      targets: []
    }
  ]
};

View File

@@ -1,14 +0,0 @@
import { ModuleTemplateTypeEnum } from '../../constants';
import { FlowNodeTypeEnum } from '../../node/constant';
import { FlowModuleTemplateType } from '../../type.d';
export const EmptyModule: FlowModuleTemplateType = {
id: FlowNodeTypeEnum.empty,
templateType: ModuleTemplateTypeEnum.other,
flowType: FlowNodeTypeEnum.empty,
avatar: '/imgs/module/cq.png',
name: '该模块已被移除',
intro: '',
inputs: [],
outputs: []
};

View File

@@ -38,7 +38,7 @@ export type ModuleItemType = {
outputs: FlowNodeOutputItemType[];
};
/* function type */
/* --------------- function type -------------------- */
// variable
export type VariableItemType = {
id: string;
@@ -74,3 +74,46 @@ export type ContextExtractAgentItemType = {
required: boolean;
enum?: string;
};
/* -------------- running module -------------- */
export type RunningModuleItemType = {
name: ModuleItemType['name'];
moduleId: ModuleItemType['moduleId'];
flowType: ModuleItemType['flowType'];
showStatus?: ModuleItemType['showStatus'];
} & {
inputs: {
key: string;
value?: any;
}[];
outputs: {
key: string;
answer?: boolean;
response?: boolean;
value?: any;
targets: {
moduleId: string;
key: string;
}[];
}[];
};
export type ChatDispatchProps = {
res: NextApiResponse;
mode: 'test' | 'chat';
teamId: string;
tmbId: string;
user: UserType;
appId: string;
chatId?: string;
responseChatItemId?: string;
histories: ChatItemType[];
variables: Record<string, any>;
stream: boolean;
detail: boolean; // response detail
};
export type ModuleDispatchProps<T> = ChatDispatchProps & {
outputs: RunningModuleItemType['outputs'];
inputs: T;
};

View File

@@ -15,14 +15,19 @@ export type PluginItemSchema = {
};
/* plugin template */
export type PluginTemplateType = {
export type PluginTemplateType = PluginRuntimeType & {
author?: string;
id: string;
source: `${PluginSourceEnum}`;
templateType: FlowModuleTemplateType['templateType'];
intro: string;
modules: ModuleItemType[];
};
export type PluginRuntimeType = {
teamId?: string;
name: string;
avatar: string;
intro: string;
showStatus?: boolean;
modules: ModuleItemType[];
};

View File

@@ -20,11 +20,12 @@ export async function connectMongo({
console.log('mongo start connect');
try {
mongoose.set('strictQuery', true);
const maxConnecting = Math.max(20, Number(process.env.DB_MAX_LINK || 20));
await mongoose.connect(process.env.MONGODB_URI as string, {
bufferCommands: true,
maxConnecting: Number(process.env.DB_MAX_LINK || 5),
maxPoolSize: Number(process.env.DB_MAX_LINK || 5),
minPoolSize: Math.min(10, Number(process.env.DB_MAX_LINK || 10)),
maxConnecting: maxConnecting,
maxPoolSize: maxConnecting,
minPoolSize: Math.max(5, Math.round(Number(process.env.DB_MAX_LINK || 5) * 0.1)),
connectTimeoutMS: 60000,
waitQueueTimeoutMS: 60000,
socketTimeoutMS: 60000,

View File

@@ -1,14 +1,12 @@
import type { NextApiResponse } from 'next';
import { getAIApi } from '../config';
import { defaultAudioSpeechModels } from '../../../../global/core/ai/model';
import { UserModelSchema } from '@fastgpt/global/support/user/type';
export async function text2Speech({
res,
onSuccess,
onError,
input,
model = defaultAudioSpeechModels[0].model,
model,
voice,
speed = 1
}: {

View File

@@ -0,0 +1,59 @@
import { replaceVariable } from '@fastgpt/global/common/string/tools';
import { getAIApi } from '../config';
// Few-shot prompt for searchQueryExtension: instructs the LLM to rewrite the
// user question ({{q}}, substituted via replaceVariable) into two alternative
// phrasings, returned as a JSON string array, to improve vector-database recall.
// Runtime string — content must stay byte-identical.
const prompt = `
您的任务是生成根据用户问题,从不同角度,生成两个不同版本的问题,以便可以从矢量数据库检索相关文档。例如:
问题: FastGPT如何使用
OUTPUT: ["FastGPT使用教程。","怎么使用FastGPT"]
-------------------
问题: FastGPT如何收费
OUTPUT: ["FastGPT收费标准。","FastGPT是如何计费的"]
-------------------
问题: 怎么FastGPT部署
OUTPUT: ["FastGPT的部署方式。","如何部署FastGPT"]
-------------------
问题 question: {{q}}
OUTPUT:
`;
/**
 * Expand a user query into alternative phrasings for vector retrieval.
 *
 * Calls the chat-completions API with the few-shot `prompt` above and expects
 * a JSON string array back. On any failure (empty answer, invalid JSON, or a
 * payload that is not a pure string array) it degrades gracefully to the
 * original query with zero token usage.
 *
 * @param query - the raw user question
 * @param model - chat model id to use for the rewrite
 * @returns queries (expanded or fallback), model id, and token usage
 */
export const searchQueryExtension = async ({ query, model }: { query: string; model: string }) => {
  // 480s timeout — presumably generous for slow self-hosted models; TODO confirm.
  const ai = getAIApi(undefined, 480000);

  const result = await ai.chat.completions.create({
    model,
    temperature: 0,
    messages: [
      {
        role: 'user',
        content: replaceVariable(prompt, { q: query })
      }
    ],
    stream: false
  });

  // Shared graceful-degradation result: keep the original query, report no usage.
  const fallback = {
    queries: [query],
    model,
    inputTokens: 0,
    responseTokens: 0
  };

  const answer = result.choices?.[0]?.message?.content || '';
  if (!answer) {
    return fallback;
  }

  try {
    const parsed: unknown = JSON.parse(answer);
    // Fix: the original blindly cast with `as string[]`; a model returning an
    // object or mixed array would leak through. Accept only a pure string array.
    if (!Array.isArray(parsed) || !parsed.every((item) => typeof item === 'string')) {
      return fallback;
    }
    return {
      queries: parsed,
      model,
      inputTokens: result.usage?.prompt_tokens || 0,
      responseTokens: result.usage?.completion_tokens || 0
    };
  } catch (error) {
    return fallback;
  }
};

View File

@@ -3,7 +3,7 @@ import { FlowModuleTemplateType } from '@fastgpt/global/core/module/type';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
import { plugin2ModuleIO } from '@fastgpt/global/core/module/utils';
import { PluginSourceEnum } from '@fastgpt/global/core/plugin/constants';
import type { PluginTemplateType } from '@fastgpt/global/core/plugin/type.d';
import type { PluginRuntimeType, PluginTemplateType } from '@fastgpt/global/core/plugin/type.d';
import { ModuleTemplateTypeEnum } from '@fastgpt/global/core/module/constants';
/*
@@ -41,6 +41,7 @@ const getPluginTemplateById = async (id: string): Promise<PluginTemplateType> =>
if (!item) return Promise.reject('plugin not found');
return {
id: String(item._id),
teamId: String(item.teamId),
name: item.name,
avatar: item.avatar,
intro: item.intro,
@@ -74,16 +75,14 @@ export async function getPluginPreviewModule({
}
/* run plugin time */
export async function getPluginRuntimeById(id: string): Promise<PluginTemplateType> {
export async function getPluginRuntimeById(id: string): Promise<PluginRuntimeType> {
const plugin = await getPluginTemplateById(id);
return {
id: plugin.id,
source: plugin.source,
templateType: plugin.templateType,
teamId: plugin.teamId,
name: plugin.name,
avatar: plugin.avatar,
intro: plugin.intro,
showStatus: plugin.showStatus,
modules: plugin.modules
};
}

View File

@@ -0,0 +1,35 @@
import TurndownService from 'turndown';
// @ts-ignore
import * as turndownPluginGfm from 'joplin-turndown-plugin-gfm';
// Module-level converter shared by all calls.
const turndownService = new TurndownService({
  headingStyle: 'atx',
  bulletListMarker: '-',
  codeBlockStyle: 'fenced',
  fence: '```',
  emDelimiter: '_',
  strongDelimiter: '**',
  linkStyle: 'inlined',
  linkReferenceStyle: 'full'
});

// Fix: one-time configuration hoisted out of htmlStr2Md — the original
// re-ran remove/addRule/use on every invocation, re-registering the same
// rules each call. Registration order (remove → addRule → gfm) is preserved.
turndownService.remove(['i', 'script', 'iframe']);
turndownService.addRule('codeBlock', {
  filter: 'pre',
  replacement(_, node) {
    const content = node.textContent?.trim() || '';
    // `_attrsByQName` is an internal of the DOM implementation used here;
    // it carries the <pre> class (language name) for the fence info string.
    // @ts-ignore
    const codeName = node?._attrsByQName?.class?.data?.trim() || '';
    return `\n\`\`\`${codeName}\n${content}\n\`\`\`\n`;
  }
});
turndownService.use(turndownPluginGfm.gfm);

/**
 * Convert an HTML string to Markdown.
 * Browser-only: relies on DOMParser to build a DOM before conversion.
 */
export const htmlStr2Md = (html: string) => {
  // Parse the HTML string into a Document for turndown to walk.
  const parser = new DOMParser();
  const dom = parser.parseFromString(html, 'text/html');
  return turndownService.turndown(dom);
};

View File

@@ -1,6 +1,11 @@
{
"name": "@fastgpt/web",
"version": "1.0.0",
"dependencies": {},
"devDependencies": {}
"dependencies": {
"joplin-turndown-plugin-gfm": "^1.0.12",
"turndown": "^7.1.2"
},
"devDependencies": {
"@types/turndown": "^5.0.4"
}
}