commit 3b776b6639 (parent dd8f2744bf)
Author: Archer
Date: 2023-10-17 10:00:32 +08:00
Committed by: GitHub

98 changed files with 1525 additions and 983 deletions

View File

@@ -10,9 +10,11 @@ import { FlowModuleTypeEnum } from '@/constants/flow';
import type { ModuleDispatchProps } from '@/types/core/chat/type';
import { replaceVariable } from '@/utils/common/tools/text';
import { Prompt_CQJson } from '@/global/core/prompt/agent';
import { defaultCQModel } from '@/pages/api/system/getInitData';
import { FunctionModelItemType } from '@/types/model';
import { getCQModel } from '@/service/core/ai/model';
type Props = ModuleDispatchProps<{
model: string;
systemPrompt?: string;
history?: ChatItemType[];
[SystemInputEnum.userChatInput]: string;
@@ -30,20 +32,26 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
const {
moduleName,
user,
inputs: { agents, userChatInput }
inputs: { model, agents, userChatInput }
} = props as Props;
if (!userChatInput) {
return Promise.reject('Input is empty');
}
const cqModel = global.cqModel || defaultCQModel;
const cqModel = getCQModel(model);
const { arg, tokens } = await (async () => {
if (cqModel.functionCall) {
return functionCall(props);
return functionCall({
...props,
cqModel
});
}
return completions(props);
return completions({
...props,
cqModel
});
})();
const result = agents.find((item) => item.key === arg?.type) || agents[agents.length - 1];
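The change above swaps the `global.cqModel || defaultCQModel` singleton for a per-request lookup keyed by the module's new `model` input. Only the name `getCQModel` and its import path appear in the diff; a minimal sketch of what the helper plausibly looks like, with the `global.cqModels` list and the fallback behaviour assumed:

// Hedged sketch of @/service/core/ai/model (assumed implementation).
// Resolves the selected model id against the configured CQ model list,
// falling back to the first configured entry for unknown ids.
import type { FunctionModelItemType } from '@/types/model';

declare global {
  // assumption: populated once at startup from the system config
  var cqModels: FunctionModelItemType[];
}

export function getCQModel(model?: string): FunctionModelItemType {
  return global.cqModels.find((item) => item.model === model) ?? global.cqModels[0];
}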
@@ -64,45 +72,45 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
async function functionCall({
user,
cqModel,
inputs: { agents, systemPrompt, history = [], userChatInput }
}: Props) {
const cqModel = global.cqModel;
}: Props & { cqModel: FunctionModelItemType }) {
const messages: ChatItemType[] = [
...(systemPrompt
? [
{
obj: ChatRoleEnum.System,
value: systemPrompt
}
]
: []),
...history,
{
obj: ChatRoleEnum.Human,
value: userChatInput
value: systemPrompt
? `补充的背景知识:
"""
${systemPrompt}
"""
我的问题: ${userChatInput}
`
: userChatInput
}
];
const filterMessages = ChatContextFilter({
messages,
maxTokens: cqModel.maxToken
});
const adaptMessages = adaptChat2GptMessages({ messages: filterMessages, reserveId: false });
// function body

const agentFunction = {
name: agentFunName,
description: '判断用户问题类型属于哪方面,返回对应的字段',
description: '请根据对话记录及补充的背景知识,判断用户问题类型,返回对应的字段',
parameters: {
type: 'object',
properties: {
type: {
type: 'string',
description: agents.map((item) => `${item.value},返回:'${item.key}'`).join(''),
description: `判断用户的问题类型,并返回对应的字段。下面是几种问题类型: ${agents
.map((item) => `${item.value},返回:'${item.key}'`)
.join('')}`,
enum: agents.map((item) => item.key)
}
},
required: ['type']
}
}
};
const ai = getAIApi(user.openaiAccount, 48000);
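`agentFunction` above is an OpenAI function-calling schema: the `description` (roughly, "based on the conversation history and the supplementary background knowledge, classify the user's question type and return the corresponding field") steers the model, while `enum` constrains the answer to the configured agent keys. A hedged sketch of how such a schema is typically submitted, using the legacy `functions`/`function_call` parameters that the October-2023 SDK exposes; the surrounding wiring is an assumption, not taken from the diff:

// Sketch only: forcing the model to answer through the classifier function.
const response = await ai.chat.completions.create({
  model: cqModel.model,
  temperature: 0,
  messages: adaptMessages,
  functions: [agentFunction],
  function_call: { name: agentFunName } // require a structured classification
});

// The chosen type comes back as JSON-encoded function arguments.
const args = JSON.parse(
  response.choices?.[0]?.message?.function_call?.arguments ?? '{}'
);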
@@ -133,15 +141,14 @@ async function functionCall({
}
async function completions({
cqModel,
user,
inputs: { agents, systemPrompt = '', history = [], userChatInput }
}: Props) {
const extractModel = global.extractModel;
}: Props & { cqModel: FunctionModelItemType }) {
const messages: ChatItemType[] = [
{
obj: ChatRoleEnum.Human,
value: replaceVariable(extractModel.prompt || Prompt_CQJson, {
value: replaceVariable(cqModel.functionPrompt || Prompt_CQJson, {
systemPrompt,
typeList: agents.map((item) => `ID: "${item.key}", 问题类型:${item.value}`).join('\n'),
text: `${history.map((item) => `${item.obj}:${item.value}`).join('\n')}
@@ -153,7 +160,7 @@ Human:${userChatInput}`
const ai = getAIApi(user.openaiAccount, 480000);
const data = await ai.chat.completions.create({
model: extractModel.model,
model: cqModel.model,
temperature: 0.01,
messages: adaptChat2GptMessages({ messages, reserveId: false }),
stream: false
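The `completions` fallback builds the whole classification task as a single human message, filling `cqModel.functionPrompt || Prompt_CQJson` (the `typeList` line renders each agent as `ID: "<key>", 问题类型 ("question type"): <value>`). `replaceVariable` is imported above but not shown; a sketch of its behaviour inferred from the call sites, with the `{{key}}` placeholder syntax assumed:

// Assumed template helper: substitutes {{key}} placeholders with values.
export function replaceVariable(
  text: string,
  obj: Record<string, string | number>
) {
  for (const key in obj) {
    // global replace so repeated placeholders are all filled;
    // assumes simple alphanumeric keys (no regex metacharacters)
    text = text.replace(new RegExp(`{{${key}}}`, 'g'), String(obj[key]));
  }
  return text;
}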

View File

@@ -9,7 +9,7 @@ import { FlowModuleTypeEnum } from '@/constants/flow';
import type { ModuleDispatchProps } from '@/types/core/chat/type';
import { Prompt_ExtractJson } from '@/global/core/prompt/agent';
import { replaceVariable } from '@/utils/common/tools/text';
import { defaultExtractModel } from '@/pages/api/system/getInitData';
import { FunctionModelItemType } from '@/types/model';
type Props = ModuleDispatchProps<{
history?: ChatItemType[];
@@ -37,13 +37,19 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
return Promise.reject('Input is empty');
}
const extractModel = global.extractModel || defaultExtractModel;
const extractModel = global.extractModels[0];
const { arg, tokens } = await (async () => {
if (extractModel.functionCall) {
return functionCall(props);
return functionCall({
...props,
extractModel
});
}
return completions(props);
return completions({
...props,
extractModel
});
})();
// remove invalid key
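This is the same refactor pattern as the classify-question module: resolve the model once in the dispatcher, then thread it through as an intersection type instead of reading `global.extractModel` inside each helper, which keeps the helpers pure functions of their inputs. Condensed (names mirror the diff; the rest is a sketch):

// Sketch of the model-threading pattern used across this commit.
async function dispatchContentExtract(props: Props) {
  const extractModel = global.extractModels[0];
  return extractModel.functionCall
    ? functionCall({ ...props, extractModel }) // typed Props & { extractModel: FunctionModelItemType }
    : completions({ ...props, extractModel });
}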
@@ -83,11 +89,10 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
}
async function functionCall({
extractModel,
user,
inputs: { history = [], content, extractKeys, description }
}: Props) {
const extractModel = global.extractModel;
}: Props & { extractModel: FunctionModelItemType }) {
const messages: ChatItemType[] = [
...history,
{
@@ -152,15 +157,14 @@ async function functionCall({
}
async function completions({
extractModel,
user,
inputs: { history = [], content, extractKeys, description }
}: Props) {
const extractModel = global.extractModel;
}: Props & { extractModel: FunctionModelItemType }) {
const messages: ChatItemType[] = [
{
obj: ChatRoleEnum.Human,
value: replaceVariable(extractModel.prompt || Prompt_ExtractJson, {
value: replaceVariable(extractModel.functionPrompt || Prompt_ExtractJson, {
description,
json: extractKeys
.map(

View File

@@ -7,7 +7,6 @@ import { textAdaptGptResponse } from '@/utils/adapt';
import { getAIApi } from '@fastgpt/core/ai/config';
import type { ChatCompletion, StreamChatType } from '@fastgpt/core/ai/type';
import { TaskResponseKeyEnum } from '@/constants/chat';
import { getChatModel } from '@/service/utils/data';
import { countModelPrice } from '@/service/common/bill/push';
import { ChatModelItemType } from '@/types/model';
import { postTextCensor } from '@fastgpt/common/plusApi/censor';
@@ -15,12 +14,13 @@ import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/core/ai/constant'
import { AppModuleItemType } from '@/types/app';
import { countMessagesTokens, sliceMessagesTB } from '@/utils/common/tiktoken';
import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
import { defaultQuotePrompt, defaultQuoteTemplate } from '@/global/core/prompt/AIChat';
import { Prompt_QuotePromptList, Prompt_QuoteTemplateList } from '@/global/core/prompt/AIChat';
import type { AIChatProps } from '@/types/core/aiChat';
import { replaceVariable } from '@/utils/common/tools/text';
import { FlowModuleTypeEnum } from '@/constants/flow';
import type { ModuleDispatchProps } from '@/types/core/chat/type';
import { responseWrite, responseWriteController } from '@/service/common/stream';
import { responseWrite, responseWriteController } from '@fastgpt/common/tools/stream';
import { getChatModel, ModelTypeEnum } from '@/service/core/ai/model';
export type ChatProps = ModuleDispatchProps<
AIChatProps & {
@@ -47,12 +47,13 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
user,
outputs,
inputs: {
model = global.chatModels[0]?.model,
model,
temperature = 0,
maxToken = 4000,
history = [],
quoteQA = [],
userChatInput,
isResponseAnswerText = true,
systemPrompt = '',
limitPrompt,
quoteTemplate,
@@ -63,6 +64,8 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
return Promise.reject('Question is empty');
}
stream = stream && isResponseAnswerText;
// temperature adapt
const modelConstantsData = getChatModel(model);
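`getChatModel` now comes from the consolidated registry in `@/service/core/ai/model` rather than `@/service/utils/data`. A hedged sketch of that registry; only the two imported names and the `chat`/`vector` members used in this commit are confirmed, everything else is assumed:

// Assumed shape of @/service/core/ai/model.
import type { ChatModelItemType } from '@/types/model';

declare global {
  // global.chatModels already appears above (the removed inputs.model default)
  var chatModels: ChatModelItemType[];
}

export enum ModelTypeEnum {
  chat = 'chat',
  vector = 'vector'
  // further members (e.g. for CQ/extract models) are plausible but unconfirmed
}

export const getChatModel = (model?: string): ChatModelItemType =>
  global.chatModels.find((item) => item.model === model) ?? global.chatModels[0];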
@@ -110,18 +113,18 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
model,
temperature,
max_tokens,
stream,
messages: [
...(modelConstantsData.defaultSystem
...(modelConstantsData.defaultSystemChatPrompt
? [
{
role: ChatCompletionRequestMessageRoleEnum.System,
content: modelConstantsData.defaultSystem
content: modelConstantsData.defaultSystemChatPrompt
}
]
: []),
...messages
],
stream
]
});
const { answerText, totalTokens, completeMessages } = await (async () => {
@@ -172,7 +175,9 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
[TaskResponseKeyEnum.responseData]: {
moduleType: FlowModuleTypeEnum.chatNode,
moduleName,
price: user.openaiAccount?.key ? 0 : countModelPrice({ model, tokens: totalTokens }),
price: user.openaiAccount?.key
? 0
: countModelPrice({ model, tokens: totalTokens, type: ModelTypeEnum.chat }),
model: modelConstantsData.name,
tokens: totalTokens,
question: userChatInput,
@@ -198,7 +203,7 @@ function filterQuote({
maxTokens: model.quoteMaxToken,
messages: quoteQA.map((item, index) => ({
obj: ChatRoleEnum.System,
value: replaceVariable(quoteTemplate || defaultQuoteTemplate, {
value: replaceVariable(quoteTemplate || Prompt_QuoteTemplateList[0].value, {
...item,
index: index + 1
})
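`defaultQuoteTemplate` becomes `Prompt_QuoteTemplateList[0].value`: the default is now simply the first entry of a selectable template list. Beyond the `[0].value` access, the list's shape is an assumption, and the `q`/`a`/`index` placeholder names are hypothetical:

// Assumed shape of the new list in @/global/core/prompt/AIChat.
export const Prompt_QuoteTemplateList = [
  {
    title: 'standard', // hypothetical label
    value: `{{index}}. {{q}}\n{{a}}` // filled per quote item via replaceVariable
  }
];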
@@ -212,7 +217,7 @@ function filterQuote({
filterQuoteQA.length > 0
? `${filterQuoteQA
.map((item, index) =>
replaceVariable(quoteTemplate || defaultQuoteTemplate, {
replaceVariable(quoteTemplate || Prompt_QuoteTemplateList[0].value, {
...item,
index: `${index + 1}`
})
@@ -243,7 +248,7 @@ function getChatMessages({
model: ChatModelItemType;
}) {
const question = quoteText
? replaceVariable(quotePrompt || defaultQuotePrompt, {
? replaceVariable(quotePrompt || Prompt_QuotePromptList[0].value, {
quote: quoteText,
question: userChatInput
})
@@ -275,7 +280,7 @@ function getChatMessages({
const filterMessages = ChatContextFilter({
messages,
maxTokens: Math.ceil(model.contextMaxToken - 300) // filter token. not response maxToken
maxTokens: Math.ceil(model.maxToken - 300) // filter token. not response maxToken
});
const adaptMessages = adaptChat2GptMessages({ messages: filterMessages, reserveId: false });
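The history-filter budget switches from `model.contextMaxToken` to `model.maxToken` (a field rename on `ChatModelItemType`), with 300 tokens reserved for the response. `ChatContextFilter` itself is not in this diff; a sketch of the behaviour its call sites imply — trim the oldest turns until the budget fits — under those assumptions:

// Hedged sketch: keep system messages, drop oldest turns to fit maxTokens.
import { countMessagesTokens } from '@/utils/common/tiktoken';
import { ChatRoleEnum } from '@/constants/chat';
import type { ChatItemType } from '@/types/chat';

function ChatContextFilter({
  messages,
  maxTokens
}: {
  messages: ChatItemType[];
  maxTokens: number;
}) {
  const result = [...messages];
  while (result.length > 1 && countMessagesTokens({ messages: result }) > maxTokens) {
    const i = result.findIndex((m) => m.obj !== ChatRoleEnum.System);
    if (i === -1) break;
    result.splice(i, 1); // drop the oldest non-system message
  }
  return result;
}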
@@ -294,7 +299,7 @@ function getMaxTokens({
model: ChatModelItemType;
filterMessages: ChatProps['inputs']['history'];
}) {
const tokensLimit = model.contextMaxToken;
const tokensLimit = model.maxToken;
/* count response max token */
const promptsToken = countMessagesTokens({
@@ -349,7 +354,7 @@ async function streamResponse({
stream.controller?.abort();
break;
}
const content = part.choices[0]?.delta?.content || '';
const content = part.choices?.[0]?.delta?.content || '';
answer += content;
responseWrite({

View File

@@ -8,6 +8,7 @@ import type { QuoteItemType } from '@/types/chat';
import { PgDatasetTableName } from '@/constants/plugin';
import { FlowModuleTypeEnum } from '@/constants/flow';
import type { ModuleDispatchProps } from '@/types/core/chat/type';
import { ModelTypeEnum } from '@/service/core/ai/model';
type KBSearchProps = ModuleDispatchProps<{
kbList: SelectedDatasetType;
similarity: number;
@@ -66,7 +67,11 @@ export async function dispatchKBSearch(props: Record<string, any>): Promise<KBSe
responseData: {
moduleType: FlowModuleTypeEnum.kbSearchNode,
moduleName,
price: countModelPrice({ model: vectorModel.model, tokens: tokenLen }),
price: countModelPrice({
model: vectorModel.model,
tokens: tokenLen,
type: ModelTypeEnum.vector
}),
model: vectorModel.name,
tokens: tokenLen,
similarity,
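`countModelPrice` now takes a `type` discriminator so vector and chat usage can each be priced from their own registry. Its internals are not in this diff; one plausible reading, with the per-type lookup and the price-per-1k-tokens convention entirely assumed:

// Hedged sketch of countModelPrice with the new discriminator.
import { ModelTypeEnum } from '@/service/core/ai/model';

export function countModelPrice({
  model,
  tokens,
  type
}: {
  model: string;
  tokens: number;
  type: ModelTypeEnum;
}) {
  // hypothetical registry lookup; global.vectorModels is an assumption
  const registry = (
    type === ModelTypeEnum.vector ? (global as any).vectorModels : (global as any).chatModels
  ) as { model: string; price: number }[] | undefined;
  const item = registry?.find((m) => m.model === model);
  return item ? (item.price / 1000) * tokens : 0; // assumed price per 1k tokens
}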

View File

@@ -1,5 +1,5 @@
import { sseResponseEventEnum, TaskResponseKeyEnum } from '@/constants/chat';
import { sseResponse } from '@/service/utils/tools';
import { responseWrite } from '@fastgpt/common/tools/stream';
import { textAdaptGptResponse } from '@/utils/adapt';
import type { ModuleDispatchProps } from '@/types/core/chat/type';
export type AnswerProps = ModuleDispatchProps<{
@@ -21,7 +21,7 @@ export const dispatchAnswer = (props: Record<string, any>): AnswerResponse => {
const formatText = typeof text === 'string' ? text : JSON.stringify(text, null, 2);
if (stream) {
sseResponse({
responseWrite({
res,
event: detail ? sseResponseEventEnum.answer : undefined,
data: textAdaptGptResponse({
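The app-local `sseResponse` helper gives way to `responseWrite` from the shared `@fastgpt/common/tools/stream` package. Judging from its call sites (`res`, optional `event`, `data`), it writes a single server-sent event; a minimal sketch under that assumption:

// Assumed implementation of responseWrite: emit one SSE frame.
import type { ServerResponse } from 'http';

export function responseWrite({
  res,
  event,
  data
}: {
  res: ServerResponse;
  event?: string;
  data: string;
}) {
  if (event) res.write(`event: ${event}\n`);
  res.write(`data: ${data}\n\n`); // blank line terminates the event
}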

View File

@@ -3,7 +3,7 @@ import type { ModuleDispatchProps } from '@/types/core/chat/type';
import { SelectAppItemType } from '@/types/core/app/flow';
import { dispatchModules } from '@/pages/api/v1/chat/completions';
import { App } from '@/service/mongo';
import { responseWrite } from '@/service/common/stream';
import { responseWrite } from '@fastgpt/common/tools/stream';
import { ChatRoleEnum, TaskResponseKeyEnum, sseResponseEventEnum } from '@/constants/chat';
import { textAdaptGptResponse } from '@/utils/adapt';