Commit 98ce5103a0 (parent c65a36d3ab)
Archer, 2023-10-07 18:02:20 +08:00, committed by GitHub
56 changed files with 868 additions and 282 deletions

View File

@@ -2,7 +2,7 @@ import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
import { ChatContextFilter } from '@/service/common/tiktoken';
import type { ChatHistoryItemResType, ChatItemType } from '@/types/chat';
import { ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/chat';
- import { getAIChatApi, axiosConfig } from '@fastgpt/core/aiApi/config';
+ import { getAIChatApi, axiosConfig } from '@fastgpt/core/ai/config';
import type { ClassifyQuestionAgentItemType } from '@/types/app';
import { SystemInputEnum } from '@/constants/app';
import { SpecialInputKeyEnum } from '@/constants/flow';
@@ -29,7 +29,7 @@ const agentFunName = 'agent_user_question';
export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse> => {
const {
moduleName,
- userOpenaiAccount,
+ user,
inputs: { agents, userChatInput }
} = props as Props;
@@ -53,7 +53,7 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
[TaskResponseKeyEnum.responseData]: {
moduleType: FlowModuleTypeEnum.classifyQuestion,
moduleName,
- price: userOpenaiAccount?.key ? 0 : cqModel.price * tokens,
+ price: user.openaiAccount?.key ? 0 : cqModel.price * tokens,
model: cqModel.name || '',
tokens,
cqList: agents,
@@ -63,7 +63,7 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
};
async function functionCall({
- userOpenaiAccount,
+ user,
inputs: { agents, systemPrompt, history = [], userChatInput }
}: Props) {
const cqModel = global.cqModel;
@@ -105,7 +105,7 @@ async function functionCall({
required: ['type']
}
};
- const chatAPI = getAIChatApi(userOpenaiAccount);
+ const chatAPI = getAIChatApi(user.openaiAccount);
const response = await chatAPI.createChatCompletion(
{
@@ -116,7 +116,7 @@ async function functionCall({
functions: [agentFunction]
},
{
- ...axiosConfig(userOpenaiAccount)
+ ...axiosConfig(user.openaiAccount)
}
);
@@ -138,7 +138,7 @@ async function functionCall({
}
async function completions({
- userOpenaiAccount,
+ user,
inputs: { agents, systemPrompt = '', history = [], userChatInput }
}: Props) {
const extractModel = global.extractModel;
@@ -155,7 +155,7 @@ Human:${userChatInput}`
}
];
- const chatAPI = getAIChatApi(userOpenaiAccount);
+ const chatAPI = getAIChatApi(user.openaiAccount);
const { data } = await chatAPI.createChatCompletion(
{
@@ -166,7 +166,7 @@ Human:${userChatInput}`
},
{
timeout: 480000,
- ...axiosConfig(userOpenaiAccount)
+ ...axiosConfig(user.openaiAccount)
}
);
const answer = data.choices?.[0].message?.content || '';
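
Across both agent functions the change is mechanical: the flat userOpenaiAccount prop becomes a user object whose optional openaiAccount carries the key. A minimal sketch of the billing rule this diff repeats, with hypothetical OpenaiAccountType/UserType shapes standing in for the repo's real definitions:

// Hypothetical shapes for illustration only; the real types live elsewhere in the repo.
type OpenaiAccountType = { key?: string; baseUrl?: string };
type UserType = { openaiAccount?: OpenaiAccountType };

// The pricing rule repeated in every module in this commit:
// if the user supplied their own OpenAI key, the platform charges nothing.
function modulePrice(user: UserType, unitPrice: number, tokens: number): number {
  return user.openaiAccount?.key ? 0 : unitPrice * tokens;
}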

View File

@@ -2,7 +2,7 @@ import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
import { ChatContextFilter } from '@/service/common/tiktoken';
import type { ChatHistoryItemResType, ChatItemType } from '@/types/chat';
import { ChatRoleEnum, TaskResponseKeyEnum } from '@/constants/chat';
- import { getAIChatApi, axiosConfig } from '@fastgpt/core/aiApi/config';
+ import { getAIChatApi, axiosConfig } from '@fastgpt/core/ai/config';
import type { ContextExtractAgentItemType } from '@/types/app';
import { ContextExtractEnum } from '@/constants/flow/flowField';
import { FlowModuleTypeEnum } from '@/constants/flow';
@@ -29,7 +29,7 @@ const agentFunName = 'agent_extract_data';
export async function dispatchContentExtract(props: Props): Promise<Response> {
const {
moduleName,
- userOpenaiAccount,
+ user,
inputs: { content, description, extractKeys }
} = props;
@@ -73,7 +73,7 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
[TaskResponseKeyEnum.responseData]: {
moduleType: FlowModuleTypeEnum.contentExtract,
moduleName,
- price: userOpenaiAccount?.key ? 0 : extractModel.price * tokens,
+ price: user.openaiAccount?.key ? 0 : extractModel.price * tokens,
model: extractModel.name || '',
tokens,
extractDescription: description,
@@ -83,7 +83,7 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
}
async function functionCall({
- userOpenaiAccount,
+ user,
inputs: { history = [], content, extractKeys, description }
}: Props) {
const extractModel = global.extractModel;
@@ -126,7 +126,7 @@ async function functionCall({
}
};
- const chatAPI = getAIChatApi(userOpenaiAccount);
+ const chatAPI = getAIChatApi(user.openaiAccount);
const response = await chatAPI.createChatCompletion(
{
@@ -137,7 +137,7 @@ async function functionCall({
functions: [agentFunction]
},
{
- ...axiosConfig(userOpenaiAccount)
+ ...axiosConfig(user.openaiAccount)
}
);
@@ -157,7 +157,7 @@ async function functionCall({
}
async function completions({
- userOpenaiAccount,
+ user,
inputs: { history = [], content, extractKeys, description }
}: Props) {
const extractModel = global.extractModel;
@@ -181,7 +181,7 @@ Human: ${content}`
}
];
- const chatAPI = getAIChatApi(userOpenaiAccount);
+ const chatAPI = getAIChatApi(user.openaiAccount);
const { data } = await chatAPI.createChatCompletion(
{
@@ -192,7 +192,7 @@ Human: ${content}`
},
{
timeout: 480000,
- ...axiosConfig(userOpenaiAccount)
+ ...axiosConfig(user.openaiAccount)
}
);
const answer = data.choices?.[0].message?.content || '';

View File

@@ -5,13 +5,13 @@ import type { ChatHistoryItemResType } from '@/types/chat';
import { ChatRoleEnum, sseResponseEventEnum } from '@/constants/chat';
import { SSEParseData, parseStreamChunk } from '@/utils/sse';
import { textAdaptGptResponse } from '@/utils/adapt';
- import { getAIChatApi, axiosConfig } from '@fastgpt/core/aiApi/config';
+ import { getAIChatApi, axiosConfig } from '@fastgpt/core/ai/config';
import { TaskResponseKeyEnum } from '@/constants/chat';
import { getChatModel } from '@/service/utils/data';
import { countModelPrice } from '@/service/common/bill/push';
import { ChatModelItemType } from '@/types/model';
import { textCensor } from '@/api/service/plugins';
- import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/core/aiApi/constant';
+ import { ChatCompletionRequestMessageRoleEnum } from '@fastgpt/core/ai/constant';
import { AppModuleItemType } from '@/types/app';
import { countMessagesTokens, sliceMessagesTB } from '@/utils/common/tiktoken';
import { adaptChat2GptMessages } from '@/utils/common/adapt/message';
@@ -35,6 +35,7 @@ export type ChatProps = ModuleDispatchProps<
export type ChatResponse = {
[TaskResponseKeyEnum.answerText]: string;
[TaskResponseKeyEnum.responseData]: ChatHistoryItemResType;
+ [TaskResponseKeyEnum.history]: ChatItemType[];
finish: boolean;
};
@@ -45,7 +46,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
moduleName,
stream = false,
detail = false,
- userOpenaiAccount,
+ user,
outputs,
inputs: {
model = global.chatModels[0]?.model,
@@ -105,7 +106,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
// FastGPT temperature range: 1~10
temperature = +(modelConstantsData.maxTemperature * (temperature / 10)).toFixed(2);
temperature = Math.max(temperature, 0.01);
- const chatAPI = getAIChatApi(userOpenaiAccount);
+ const chatAPI = getAIChatApi(user.openaiAccount);
const response = await chatAPI.createChatCompletion(
{
@@ -128,7 +129,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
{
timeout: 480000,
responseType: stream ? 'stream' : 'json',
- ...axiosConfig(userOpenaiAccount)
+ ...axiosConfig(user.openaiAccount)
}
);
@@ -179,7 +180,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
[TaskResponseKeyEnum.responseData]: {
moduleType: FlowModuleTypeEnum.chatNode,
moduleName,
- price: userOpenaiAccount?.key ? 0 : countModelPrice({ model, tokens: totalTokens }),
+ price: user.openaiAccount?.key ? 0 : countModelPrice({ model, tokens: totalTokens }),
model: modelConstantsData.name,
tokens: totalTokens,
question: userChatInput,
@@ -187,6 +188,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
quoteList: filterQuoteQA,
historyPreview: getHistoryPreview(completeMessages)
},
+ [TaskResponseKeyEnum.history]: completeMessages,
finish: true
};
};
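
The chat node now also exposes the full message list under TaskResponseKeyEnum.history, so a caller can chain further modules on the completed conversation. A hedged sketch of the consuming side, assuming the enum's string keys resolve as they do elsewhere in the codebase and with followUpQuestion as a placeholder:

// Sketch only: chain a follow-up turn on the history returned by the chat node.
const chatResult = await dispatchChatCompletion(props);
const updatedHistory = chatResult[TaskResponseKeyEnum.history]; // prior turns + this Q/A pair
const next = await dispatchChatCompletion({
  ...props,
  inputs: {
    ...props.inputs,
    history: updatedHistory,
    userChatInput: followUpQuestion // placeholder for the next user input
  }
});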

View File

@@ -4,5 +4,6 @@ export * from './chat/oneapi';
export * from './kb/search';
export * from './tools/answer';
export * from './tools/http';
+ export * from './tools/runApp';
export * from './agent/classifyQuestion';
export * from './agent/extract';

View File

@@ -6,7 +6,6 @@ export type AnswerProps = ModuleDispatchProps<{
text: string;
}>;
export type AnswerResponse = {
[TaskResponseKeyEnum.answerText]: string;
finish: boolean;
};
@@ -29,7 +28,6 @@ export const dispatchAnswer = (props: Record<string, any>): AnswerResponse => {
}
return {
[TaskResponseKeyEnum.answerText]: text,
finish: true
};
};

View File

@@ -0,0 +1,80 @@
import { ChatHistoryItemResType, ChatItemType } from '@/types/chat';
import type { ModuleDispatchProps } from '@/types/core/chat/type';
import { SelectAppItemType } from '@/types/core/app/flow';
import { dispatchModules } from '@/pages/api/v1/chat/completions';
import { App } from '@/service/mongo';
import { responseWrite } from '@/service/common/stream';
import { ChatRoleEnum, TaskResponseKeyEnum, sseResponseEventEnum } from '@/constants/chat';
import { textAdaptGptResponse } from '@/utils/adapt';

type Props = ModuleDispatchProps<{
  userChatInput: string;
  history?: ChatItemType[];
  app: SelectAppItemType;
}>;
type Response = {
  finish: boolean;
  [TaskResponseKeyEnum.responseData]: ChatHistoryItemResType[];
  [TaskResponseKeyEnum.answerText]: string;
  [TaskResponseKeyEnum.history]: ChatItemType[];
};

export const dispatchAppRequest = async (props: Record<string, any>): Promise<Response> => {
  const {
    res,
    variables,
    user,
    stream,
    detail,
    inputs: { userChatInput, history = [], app }
  } = props as Props;

  if (!userChatInput) {
    return Promise.reject('Input is empty');
  }

  const appData = await App.findById(app.id);
  if (!appData) {
    return Promise.reject('App not found');
  }

  // Push a newline first so the child app's stream starts on a fresh line.
  responseWrite({
    res,
    event: detail ? sseResponseEventEnum.answer : undefined,
    data: textAdaptGptResponse({
      text: '\n'
    })
  });

  // Re-enter the dispatcher with the selected app's own module graph.
  const { responseData, answerText } = await dispatchModules({
    res,
    modules: appData.modules,
    user,
    variables,
    params: {
      history,
      userChatInput
    },
    stream,
    detail
  });

  // Append this Q/A pair so callers receive the updated conversation.
  const completeMessages = history.concat([
    {
      obj: ChatRoleEnum.Human,
      value: userChatInput
    },
    {
      obj: ChatRoleEnum.AI,
      value: answerText
    }
  ]);

  return {
    finish: true,
    responseData,
    [TaskResponseKeyEnum.answerText]: answerText,
    [TaskResponseKeyEnum.history]: completeMessages
  };
};
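
For context, a hedged example of invoking the new dispatcher directly; the SelectAppItemType shape is assumed to carry at least the target app's id (only app.id is read above), and appId is a placeholder:

// Illustrative call only; prop names follow the destructuring in dispatchAppRequest.
const result = await dispatchAppRequest({
  res,
  variables,
  user,
  stream: true,
  detail: false,
  inputs: {
    userChatInput: 'Hello',
    history: [],
    app: { id: appId } // assumed minimal SelectAppItemType; only id is used here
  }
});
// result[TaskResponseKeyEnum.answerText]: the child app's reply
// result[TaskResponseKeyEnum.history]: incoming history plus the new Human/AI pair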