v4.6.2-alpha (#511)

This commit is contained in:
Archer
2023-11-24 15:29:43 +08:00
committed by GitHub
parent 60f752629f
commit 9cb4280a16
208 changed files with 5396 additions and 3500 deletions

View File

@@ -0,0 +1,12 @@
import { AppSimpleEditConfigTemplateType } from '@fastgpt/global/core/app/type';
import { GET } from '@fastgpt/service/common/api/plusRequest';
/**
 * Fetch simple-mode app edit config templates from the commercial (plus) service.
 *
 * @returns The template list, or an empty array when the plus service is not
 *          configured (`global.systemEnv.pluginBaseUrl` unset) or the request fails.
 *          Callers therefore never need their own error handling.
 */
export async function getSimpleTemplatesFromPlus(): Promise<AppSimpleEditConfigTemplateType[]> {
  try {
    // No plus service configured — nothing to fetch.
    // Optional chaining also guards against `systemEnv` itself being unset.
    if (!global.systemEnv?.pluginBaseUrl) return [];
    // `await` is required here: returning the bare promise would let a
    // rejection escape this try/catch and surface to the caller instead of
    // resolving to [] as intended.
    return await GET<AppSimpleEditConfigTemplateType[]>('/core/app/getSimpleTemplates');
  } catch (error) {
    return [];
  }
}

View File

@@ -27,6 +27,7 @@ export async function insertData2Dataset({
model: string;
}) {
if (!q || !datasetId || !collectionId || !model) {
console.log(q, a, datasetId, collectionId, model);
return Promise.reject('q, datasetId, collectionId, model is required');
}
if (String(teamId) === String(tmbId)) {

View File

@@ -153,11 +153,11 @@ export async function searchDatasetData({
SET LOCAL hnsw.ef_search = ${global.systemEnv.pgHNSWEfSearch || 100};
select id, collection_id, data_id, (vector <#> '[${
vectors[0]
}]') * -1 AS score from ${PgDatasetTableName} where dataset_id IN (${datasetIds
.map((id) => `'${String(id)}'`)
.join(',')}) AND vector <#> '[${vectors[0]}]' < -${similarity} order by vector <#> '[${
}]') * -1 AS score from ${PgDatasetTableName}
where dataset_id IN (${datasetIds.map((id) => `'${String(id)}'`).join(',')}) AND vector <#> '[${
vectors[0]
}]' limit ${minLimit};
}]' < -${similarity}
order by score desc limit ${minLimit};
COMMIT;`
);

View File

@@ -0,0 +1,7 @@
import { FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
/**
 * Lookup of module flow-types that act as entry points for a dispatch run.
 * A `true` entry means modules of that type receive the initial params and
 * may start executing immediately.
 */
const entryModuleTypes = [
  FlowNodeTypeEnum.historyNode,
  FlowNodeTypeEnum.questionInput,
  FlowNodeTypeEnum.pluginInput
] as const;

export const initRunningModuleType: Record<string, boolean> = Object.fromEntries(
  entryModuleTypes.map((type) => [type, true])
);

View File

@@ -1,11 +1,10 @@
import { adaptChat2GptMessages } from '@fastgpt/global/core/chat/adapt';
import { ChatContextFilter } from '@fastgpt/service/core/chat/utils';
import type { moduleDispatchResType, ChatItemType } from '@fastgpt/global/core/chat/type.d';
import { ChatRoleEnum, TaskResponseKeyEnum } from '@fastgpt/global/core/chat/constants';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { getAIApi } from '@fastgpt/service/core/ai/config';
import type { ClassifyQuestionAgentItemType } from '@fastgpt/global/core/module/type.d';
import { SystemInputEnum } from '@/constants/app';
import { FlowNodeSpecialInputKeyEnum } from '@fastgpt/global/core/module/node/constant';
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
import type { ModuleDispatchProps } from '@/types/core/chat/type';
import { replaceVariable } from '@fastgpt/global/common/string/tools';
import { Prompt_CQJson } from '@/global/core/prompt/agent';
@@ -13,14 +12,14 @@ import { FunctionModelItemType } from '@fastgpt/global/core/ai/model.d';
import { getCQModel } from '@/service/core/ai/model';
type Props = ModuleDispatchProps<{
model: string;
systemPrompt?: string;
history?: ChatItemType[];
[SystemInputEnum.userChatInput]: string;
[FlowNodeSpecialInputKeyEnum.agents]: ClassifyQuestionAgentItemType[];
[ModuleInputKeyEnum.aiModel]: string;
[ModuleInputKeyEnum.aiSystemPrompt]?: string;
[ModuleInputKeyEnum.history]?: ChatItemType[];
[ModuleInputKeyEnum.userChatInput]: string;
[ModuleInputKeyEnum.agents]: ClassifyQuestionAgentItemType[];
}>;
type CQResponse = {
[TaskResponseKeyEnum.responseData]: moduleDispatchResType;
[ModuleOutputKeyEnum.responseData]: moduleDispatchResType;
[key: string]: any;
};
@@ -56,9 +55,10 @@ export const dispatchClassifyQuestion = async (props: Props): Promise<CQResponse
return {
[result.key]: 1,
[TaskResponseKeyEnum.responseData]: {
[ModuleOutputKeyEnum.responseData]: {
price: user.openaiAccount?.key ? 0 : cqModel.price * tokens,
model: cqModel.name || '',
query: userChatInput,
tokens,
cqList: agents,
cqResult: result.value

View File

@@ -1,26 +1,26 @@
import { adaptChat2GptMessages } from '@fastgpt/global/core/chat/adapt';
import { ChatContextFilter } from '@fastgpt/service/core/chat/utils';
import type { moduleDispatchResType, ChatItemType } from '@fastgpt/global/core/chat/type.d';
import { ChatRoleEnum, TaskResponseKeyEnum } from '@fastgpt/global/core/chat/constants';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { getAIApi } from '@fastgpt/service/core/ai/config';
import type { ContextExtractAgentItemType } from '@fastgpt/global/core/module/type';
import { ContextExtractEnum } from '@/constants/flow/flowField';
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
import type { ModuleDispatchProps } from '@/types/core/chat/type';
import { Prompt_ExtractJson } from '@/global/core/prompt/agent';
import { replaceVariable } from '@fastgpt/global/common/string/tools';
import { FunctionModelItemType } from '@fastgpt/global/core/ai/model.d';
type Props = ModuleDispatchProps<{
history?: ChatItemType[];
[ContextExtractEnum.content]: string;
[ContextExtractEnum.extractKeys]: ContextExtractAgentItemType[];
[ContextExtractEnum.description]: string;
[ModuleInputKeyEnum.history]?: ChatItemType[];
[ModuleInputKeyEnum.contextExtractInput]: string;
[ModuleInputKeyEnum.extractKeys]: ContextExtractAgentItemType[];
[ModuleInputKeyEnum.description]: string;
}>;
type Response = {
[ContextExtractEnum.success]?: boolean;
[ContextExtractEnum.failed]?: boolean;
[ContextExtractEnum.fields]: string;
[TaskResponseKeyEnum.responseData]: moduleDispatchResType;
[ModuleOutputKeyEnum.success]?: boolean;
[ModuleOutputKeyEnum.failed]?: boolean;
[ModuleOutputKeyEnum.contextExtractFields]: string;
[ModuleOutputKeyEnum.responseData]: moduleDispatchResType;
};
const agentFunName = 'agent_extract_data';
@@ -70,13 +70,14 @@ export async function dispatchContentExtract(props: Props): Promise<Response> {
}
return {
[ContextExtractEnum.success]: success ? true : undefined,
[ContextExtractEnum.failed]: success ? undefined : true,
[ContextExtractEnum.fields]: JSON.stringify(arg),
[ModuleOutputKeyEnum.success]: success ? true : undefined,
[ModuleOutputKeyEnum.failed]: success ? undefined : true,
[ModuleOutputKeyEnum.contextExtractFields]: JSON.stringify(arg),
...arg,
[TaskResponseKeyEnum.responseData]: {
[ModuleOutputKeyEnum.responseData]: {
price: user.openaiAccount?.key ? 0 : extractModel.price * tokens,
model: extractModel.name || '',
query: content,
tokens,
extractDescription: description,
extractResult: arg

View File

@@ -6,7 +6,6 @@ import { sseResponseEventEnum } from '@fastgpt/service/common/response/constant'
import { textAdaptGptResponse } from '@/utils/adapt';
import { getAIApi } from '@fastgpt/service/core/ai/config';
import type { ChatCompletion, StreamChatType } from '@fastgpt/global/core/ai/type.d';
import { TaskResponseKeyEnum } from '@fastgpt/global/core/chat/constants';
import { countModelPrice } from '@/service/support/wallet/bill/utils';
import type { ChatModelItemType } from '@fastgpt/global/core/ai/model.d';
import { postTextCensor } from '@/service/common/censor';
@@ -15,26 +14,26 @@ import type { ModuleItemType } from '@fastgpt/global/core/module/type.d';
import { countMessagesTokens, sliceMessagesTB } from '@fastgpt/global/common/string/tiktoken';
import { adaptChat2GptMessages } from '@fastgpt/global/core/chat/adapt';
import { Prompt_QuotePromptList, Prompt_QuoteTemplateList } from '@/global/core/prompt/AIChat';
import type { AIChatProps } from '@/types/core/aiChat';
import type { AIChatModuleProps } from '@fastgpt/global/core/module/node/type.d';
import { replaceVariable } from '@fastgpt/global/common/string/tools';
import type { ModuleDispatchProps } from '@/types/core/chat/type';
import { responseWrite, responseWriteController } from '@fastgpt/service/common/response';
import { getChatModel, ModelTypeEnum } from '@/service/core/ai/model';
import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
import { formatStr2ChatContent } from '@fastgpt/service/core/chat/utils';
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
export type ChatProps = ModuleDispatchProps<
AIChatProps & {
userChatInput: string;
history?: ChatItemType[];
quoteQA?: SearchDataResponseItemType[];
limitPrompt?: string;
AIChatModuleProps & {
[ModuleInputKeyEnum.userChatInput]: string;
[ModuleInputKeyEnum.history]?: ChatItemType[];
[ModuleInputKeyEnum.aiChatDatasetQuote]?: SearchDataResponseItemType[];
}
>;
export type ChatResponse = {
[TaskResponseKeyEnum.answerText]: string;
[TaskResponseKeyEnum.responseData]: moduleDispatchResType;
[TaskResponseKeyEnum.history]: ChatItemType[];
[ModuleOutputKeyEnum.answerText]: string;
[ModuleOutputKeyEnum.responseData]: moduleDispatchResType;
[ModuleOutputKeyEnum.history]: ChatItemType[];
};
/* request openai chat */
@@ -54,7 +53,6 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
userChatInput,
isResponseAnswerText = true,
systemPrompt = '',
limitPrompt,
quoteTemplate,
quotePrompt
}
@@ -93,8 +91,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
quoteText,
quotePrompt,
userChatInput,
systemPrompt,
limitPrompt
systemPrompt
});
const { max_tokens } = getMaxTokens({
model: modelConstantsData,
@@ -182,19 +179,19 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
})();
return {
[TaskResponseKeyEnum.answerText]: answerText,
[TaskResponseKeyEnum.responseData]: {
answerText,
responseData: {
price: user.openaiAccount?.key
? 0
: countModelPrice({ model, tokens: totalTokens, type: ModelTypeEnum.chat }),
model: modelConstantsData.name,
tokens: totalTokens,
question: userChatInput,
query: userChatInput,
maxToken: max_tokens,
quoteList: filterQuoteQA,
historyPreview: getHistoryPreview(completeMessages)
},
[TaskResponseKeyEnum.history]: completeMessages
history: completeMessages
};
};
@@ -243,7 +240,6 @@ function getChatMessages({
quoteText,
history = [],
systemPrompt,
limitPrompt,
userChatInput,
model
}: {
@@ -251,7 +247,6 @@ function getChatMessages({
quoteText: string;
history: ChatProps['inputs']['history'];
systemPrompt: string;
limitPrompt?: string;
userChatInput: string;
model: ChatModelItemType;
}) {
@@ -272,14 +267,6 @@ function getChatMessages({
]
: []),
...history,
...(limitPrompt
? [
{
obj: ChatRoleEnum.System,
value: limitPrompt
}
]
: []),
{
obj: ChatRoleEnum.Human,
value: question
@@ -313,7 +300,7 @@ function getMaxTokens({
const promptsToken = countMessagesTokens({
messages: filterMessages
});
maxToken = promptsToken + model.maxResponse > tokensLimit ? tokensLimit - promptsToken : maxToken;
maxToken = promptsToken + maxToken > tokensLimit ? tokensLimit - promptsToken : maxToken;
return {
max_tokens: maxToken
@@ -330,7 +317,7 @@ function targetResponse({
detail: boolean;
}) {
const targets =
outputs.find((output) => output.key === TaskResponseKeyEnum.answerText)?.targets || [];
outputs.find((output) => output.key === ModuleOutputKeyEnum.answerText)?.targets || [];
if (targets.length === 0) return;
responseWrite({

View File

@@ -1,28 +1,32 @@
import type { moduleDispatchResType } from '@fastgpt/global/core/chat/type.d';
import { TaskResponseKeyEnum } from '@fastgpt/global/core/chat/constants';
import { countModelPrice } from '@/service/support/wallet/bill/utils';
import type { SelectedDatasetType } from '@fastgpt/global/core/module/api.d';
import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
import type { ModuleDispatchProps } from '@/types/core/chat/type';
import { ModelTypeEnum } from '@/service/core/ai/model';
import { searchDatasetData } from '@/service/core/dataset/data/pg';
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
type DatasetSearchProps = ModuleDispatchProps<{
datasets: SelectedDatasetType;
similarity: number;
limit: number;
rerank: boolean;
userChatInput: string;
[ModuleInputKeyEnum.datasetSelectList]: SelectedDatasetType;
[ModuleInputKeyEnum.datasetSimilarity]: number;
[ModuleInputKeyEnum.datasetLimit]: number;
[ModuleInputKeyEnum.datasetStartReRank]: boolean;
[ModuleInputKeyEnum.userChatInput]: string;
}>;
export type KBSearchResponse = {
[TaskResponseKeyEnum.responseData]: moduleDispatchResType;
isEmpty?: boolean;
unEmpty?: boolean;
quoteQA: SearchDataResponseItemType[];
export type DatasetSearchResponse = {
[ModuleOutputKeyEnum.responseData]: moduleDispatchResType;
[ModuleOutputKeyEnum.datasetIsEmpty]?: boolean;
[ModuleOutputKeyEnum.datasetUnEmpty]?: boolean;
[ModuleOutputKeyEnum.datasetQuoteQA]: SearchDataResponseItemType[];
};
export async function dispatchDatasetSearch(props: DatasetSearchProps): Promise<KBSearchResponse> {
export async function dispatchDatasetSearch(
props: DatasetSearchProps
): Promise<DatasetSearchResponse> {
const {
teamId,
tmbId,
inputs: { datasets = [], similarity = 0.4, limit = 5, rerank, userChatInput }
} = props as DatasetSearchProps;
@@ -56,6 +60,7 @@ export async function dispatchDatasetSearch(props: DatasetSearchProps): Promise<
tokens: tokenLen,
type: ModelTypeEnum.vector
}),
query: userChatInput,
model: vectorModel.name,
tokens: tokenLen,
similarity,

View File

@@ -1,17 +1,17 @@
import { NextApiResponse } from 'next';
import { SystemInputEnum, SystemOutputEnum } from '@/constants/app';
import { ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';
import { ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
import { RunningModuleItemType } from '@/types/app';
import { ModuleDispatchProps } from '@/types/core/chat/type';
import { ChatHistoryItemResType } from '@fastgpt/global/core/chat/api';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
import { ModuleItemType } from '@fastgpt/global/core/module/type';
import { UserType } from '@fastgpt/global/support/user/type';
import { TaskResponseKeyEnum } from '@fastgpt/global/core/chat/constants';
import { replaceVariable } from '@fastgpt/global/common/string/tools';
import { responseWrite } from '@fastgpt/service/common/response';
import { sseResponseEventEnum } from '@fastgpt/service/common/response/constant';
import { getSystemTime } from '@fastgpt/global/common/time/timezone';
import { initModuleType } from '@/constants/flow';
import { initRunningModuleType } from '../core/modules/constant';
import { dispatchHistory } from './init/history';
import { dispatchChatInput } from './init/userChatInput';
@@ -29,6 +29,7 @@ import { dispatchPluginOutput } from './plugin/runOutput';
/* running */
export async function dispatchModules({
res,
appId,
chatId,
modules,
user,
@@ -40,6 +41,7 @@ export async function dispatchModules({
detail = false
}: {
res: NextApiResponse;
appId: string;
chatId?: string;
modules: ModuleItemType[];
user: UserType;
@@ -93,60 +95,66 @@ export async function dispatchModules({
runningTime = time;
const isResponseAnswerText =
inputs.find((item) => item.key === SystemInputEnum.isResponseAnswerText)?.value ?? true;
inputs.find((item) => item.key === ModuleInputKeyEnum.aiChatIsResponseText)?.value ?? true;
if (isResponseAnswerText) {
chatAnswerText += answerText;
}
}
function moduleInput(
module: RunningModuleItemType,
data: Record<string, any> = {}
): Promise<any> {
const checkInputFinish = () => {
return !module.inputs.find((item: any) => item.value === undefined);
};
function moduleInput(module: RunningModuleItemType, data: Record<string, any> = {}) {
const updateInputValue = (key: string, value: any) => {
const index = module.inputs.findIndex((item: any) => item.key === key);
if (index === -1) return;
module.inputs[index].value = value;
};
const set = new Set();
return Promise.all(
Object.entries(data).map(([key, val]: any) => {
updateInputValue(key, val);
if (!set.has(module.moduleId) && checkInputFinish()) {
set.add(module.moduleId);
// remove switch
updateInputValue(SystemInputEnum.switch, undefined);
return moduleRun(module);
}
})
);
Object.entries(data).map(([key, val]: any) => {
updateInputValue(key, val);
});
return;
}
function moduleOutput(
module: RunningModuleItemType,
result: Record<string, any> = {}
): Promise<any> {
pushStore(module, result);
const nextRunModules: RunningModuleItemType[] = [];
// Assign the output value to the next module
module.outputs.map((outputItem) => {
if (result[outputItem.key] === undefined) return;
/* update output value */
outputItem.value = result[outputItem.key];
/* update target */
outputItem.targets.map((target: any) => {
// find module
const targetModule = runningModules.find((item) => item.moduleId === target.moduleId);
if (!targetModule) return;
// push to running queue
nextRunModules.push(targetModule);
// update input
moduleInput(targetModule, { [target.key]: outputItem.value });
});
});
return checkModulesCanRun(nextRunModules);
}
function checkModulesCanRun(modules: RunningModuleItemType[] = []) {
const set = new Set<string>();
const filterModules = modules.filter((module) => {
if (set.has(module.moduleId)) return false;
set.add(module.moduleId);
return true;
});
return Promise.all(
module.outputs.map((outputItem) => {
if (result[outputItem.key] === undefined) return;
/* update output value */
outputItem.value = result[outputItem.key];
/* update target */
return Promise.all(
outputItem.targets.map((target: any) => {
// find module
const targetModule = runningModules.find((item) => item.moduleId === target.moduleId);
if (!targetModule) return;
return moduleInput(targetModule, { [target.key]: outputItem.value });
})
);
filterModules.map((module) => {
if (!module.inputs.find((item: any) => item.value === undefined)) {
moduleInput(module, { [ModuleInputKeyEnum.switch]: undefined });
return moduleRun(module);
}
})
);
}
@@ -168,6 +176,8 @@ export async function dispatchModules({
});
const props: ModuleDispatchProps<Record<string, any>> = {
res,
appId,
chatId,
stream,
detail,
variables,
@@ -175,7 +185,6 @@ export async function dispatchModules({
user,
teamId,
tmbId,
chatId,
inputs: params
};
@@ -201,29 +210,29 @@ export async function dispatchModules({
})();
const formatResponseData = (() => {
if (!dispatchRes[TaskResponseKeyEnum.responseData]) return undefined;
if (Array.isArray(dispatchRes[TaskResponseKeyEnum.responseData]))
return dispatchRes[TaskResponseKeyEnum.responseData];
if (!dispatchRes[ModuleOutputKeyEnum.responseData]) return undefined;
if (Array.isArray(dispatchRes[ModuleOutputKeyEnum.responseData]))
return dispatchRes[ModuleOutputKeyEnum.responseData];
return {
...dispatchRes[TaskResponseKeyEnum.responseData],
moduleName: module.name,
moduleType: module.flowType
moduleType: module.flowType,
...dispatchRes[ModuleOutputKeyEnum.responseData]
};
})();
return moduleOutput(module, {
[SystemOutputEnum.finish]: true,
[ModuleOutputKeyEnum.finish]: true,
...dispatchRes,
[TaskResponseKeyEnum.responseData]: formatResponseData
[ModuleOutputKeyEnum.responseData]: formatResponseData
});
}
// start process with initInput
const initModules = runningModules.filter((item) => initModuleType[item.flowType]);
const initModules = runningModules.filter((item) => initRunningModuleType[item.flowType]);
initModules.map((module) => moduleInput(module, params));
await checkModulesCanRun(initModules);
await Promise.all(initModules.map((module) => moduleInput(module, params)));
// focus running pluginOutput
// focus try to run pluginOutput
const pluginOutputModule = runningModules.find(
(item) => item.flowType === FlowNodeTypeEnum.pluginOutput
);
@@ -232,8 +241,8 @@ export async function dispatchModules({
}
return {
[TaskResponseKeyEnum.answerText]: chatAnswerText,
[TaskResponseKeyEnum.responseData]: chatResponse
[ModuleOutputKeyEnum.answerText]: chatAnswerText,
[ModuleOutputKeyEnum.responseData]: chatResponse
};
}
@@ -249,7 +258,7 @@ function loadModules(
flowType: module.flowType,
showStatus: module.showStatus,
inputs: module.inputs
.filter((item) => item.connected) // filter unconnected target input
.filter((item) => item.connected || item.value !== undefined) // filter unconnected target input
.map((item) => {
if (typeof item.value !== 'string') {
return {
@@ -269,14 +278,14 @@ function loadModules(
outputs: module.outputs
.map((item) => ({
key: item.key,
answer: item.key === TaskResponseKeyEnum.answerText,
answer: item.key === ModuleOutputKeyEnum.answerText,
value: undefined,
targets: item.targets
}))
.sort((a, b) => {
// finish output always at last
if (a.key === SystemOutputEnum.finish) return 1;
if (b.key === SystemOutputEnum.finish) return -1;
if (a.key === ModuleOutputKeyEnum.finish) return 1;
if (b.key === ModuleOutputKeyEnum.finish) return -1;
return 0;
})
};

View File

@@ -1,9 +1,9 @@
import { SystemInputEnum } from '@/constants/app';
import { ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';
import type { ChatItemType } from '@fastgpt/global/core/chat/type.d';
import type { ModuleDispatchProps } from '@/types/core/chat/type';
export type HistoryProps = ModuleDispatchProps<{
maxContext: number;
[SystemInputEnum.history]: ChatItemType[];
[ModuleInputKeyEnum.history]: ChatItemType[];
}>;
export const dispatchHistory = (props: Record<string, any>) => {

View File

@@ -1,7 +1,7 @@
import { SystemInputEnum } from '@/constants/app';
import { ModuleInputKeyEnum } from '@fastgpt/global/core/module/constants';
import type { ModuleDispatchProps } from '@/types/core/chat/type';
export type UserChatInputProps = ModuleDispatchProps<{
[SystemInputEnum.userChatInput]: string;
[ModuleInputKeyEnum.userChatInput]: string;
}>;
export const dispatchChatInput = (props: Record<string, any>) => {

View File

@@ -1,24 +1,24 @@
import type { ModuleDispatchProps } from '@/types/core/chat/type';
import { dispatchModules } from '../index';
import {
FlowNodeSpecialInputKeyEnum,
FlowNodeTypeEnum
} from '@fastgpt/global/core/module/node/constant';
import { FlowNodeTypeEnum } from '@fastgpt/global/core/module/node/constant';
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
import type { moduleDispatchResType } from '@fastgpt/global/core/chat/type.d';
import { TaskResponseKeyEnum } from '@fastgpt/global/core/chat/constants';
import { MongoPlugin } from '@fastgpt/service/core/plugin/schema';
import { getPluginRuntimeById } from '@fastgpt/service/core/plugin/controller';
import { authPluginCanUse } from '@fastgpt/service/support/permission/auth/plugin';
type RunPluginProps = ModuleDispatchProps<{
[FlowNodeSpecialInputKeyEnum.pluginId]: string;
[ModuleInputKeyEnum.pluginId]: string;
[key: string]: any;
}>;
type RunPluginResponse = {
answerText: string;
[TaskResponseKeyEnum.responseData]?: moduleDispatchResType[];
[ModuleOutputKeyEnum.answerText]: string;
[ModuleOutputKeyEnum.responseData]?: moduleDispatchResType;
};
export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPluginResponse> => {
const {
teamId,
tmbId,
inputs: { pluginId, ...data }
} = props;
@@ -26,14 +26,15 @@ export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPlugi
return Promise.reject('Input is empty');
}
const plugin = await MongoPlugin.findOne({ _id: pluginId });
if (!plugin) {
return Promise.reject('Plugin not found');
}
await authPluginCanUse({ id: pluginId, teamId, tmbId });
const plugin = await getPluginRuntimeById(pluginId);
const { responseData, answerText } = await dispatchModules({
...props,
modules: plugin.modules,
modules: plugin.modules.map((module) => ({
...module,
showStatus: false
})),
params: data
});
@@ -45,10 +46,12 @@ export const dispatchRunPlugin = async (props: RunPluginProps): Promise<RunPlugi
return {
answerText,
// [TaskResponseKeyEnum.responseData]: output,
[TaskResponseKeyEnum.responseData]: responseData.filter(
(item) => item.moduleType !== FlowNodeTypeEnum.pluginOutput
),
responseData: {
moduleLogo: plugin.avatar,
price: responseData.reduce((sum, item) => sum + item.price, 0),
runningTime: responseData.reduce((sum, item) => sum + (item.runningTime || 0), 0),
pluginOutput: output?.pluginOutput
},
...(output ? output.pluginOutput : {})
};
};

View File

@@ -1,19 +1,19 @@
import type { moduleDispatchResType } from '@fastgpt/global/core/chat/type.d';
import { TaskResponseKeyEnum } from '@fastgpt/global/core/chat/constants';
import type { ModuleDispatchProps } from '@/types/core/chat/type';
import { ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
export type PluginOutputProps = ModuleDispatchProps<{
[key: string]: any;
}>;
export type PluginOutputResponse = {
[TaskResponseKeyEnum.responseData]: moduleDispatchResType;
[ModuleOutputKeyEnum.responseData]: moduleDispatchResType;
};
export const dispatchPluginOutput = (props: PluginOutputProps): PluginOutputResponse => {
const { inputs } = props;
return {
[TaskResponseKeyEnum.responseData]: {
responseData: {
price: 0,
pluginOutput: inputs
}

View File

@@ -1,13 +1,13 @@
import { TaskResponseKeyEnum } from '@fastgpt/global/core/chat/constants';
import { sseResponseEventEnum } from '@fastgpt/service/common/response/constant';
import { responseWrite } from '@fastgpt/service/common/response';
import { textAdaptGptResponse } from '@/utils/adapt';
import type { ModuleDispatchProps } from '@/types/core/chat/type';
import { ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
export type AnswerProps = ModuleDispatchProps<{
text: string;
}>;
export type AnswerResponse = {
[TaskResponseKeyEnum.answerText]: string;
[ModuleOutputKeyEnum.answerText]: string;
};
export const dispatchAnswer = (props: Record<string, any>): AnswerResponse => {
@@ -31,6 +31,6 @@ export const dispatchAnswer = (props: Record<string, any>): AnswerResponse => {
}
return {
[TaskResponseKeyEnum.answerText]: formatText
answerText: formatText
};
};

View File

@@ -1,14 +1,14 @@
import { HttpPropsEnum } from '@/constants/flow/flowField';
import type { moduleDispatchResType } from '@fastgpt/global/core/chat/type.d';
import { TaskResponseKeyEnum } from '@fastgpt/global/core/chat/constants';
import type { ModuleDispatchProps } from '@/types/core/chat/type';
import { ModuleInputKeyEnum, ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
export type HttpRequestProps = ModuleDispatchProps<{
[HttpPropsEnum.url]: string;
[ModuleInputKeyEnum.httpUrl]: string;
[key: string]: any;
}>;
export type HttpResponse = {
[HttpPropsEnum.failed]?: boolean;
[TaskResponseKeyEnum.responseData]: moduleDispatchResType;
[ModuleOutputKeyEnum.failed]?: boolean;
[ModuleOutputKeyEnum.responseData]: moduleDispatchResType;
[key: string]: any;
};
@@ -32,7 +32,7 @@ export const dispatchHttpRequest = async (props: Record<string, any>): Promise<H
});
return {
[TaskResponseKeyEnum.responseData]: {
responseData: {
price: 0,
body: requestBody,
httpResult: response
@@ -41,8 +41,8 @@ export const dispatchHttpRequest = async (props: Record<string, any>): Promise<H
};
} catch (error) {
return {
[HttpPropsEnum.failed]: true,
[TaskResponseKeyEnum.responseData]: {
[ModuleOutputKeyEnum.failed]: true,
responseData: {
price: 0,
body: requestBody,
httpResult: { error }

View File

@@ -4,9 +4,10 @@ import { SelectAppItemType } from '@fastgpt/global/core/module/type';
import { dispatchModules } from '../index';
import { MongoApp } from '@fastgpt/service/core/app/schema';
import { responseWrite } from '@fastgpt/service/common/response';
import { ChatRoleEnum, TaskResponseKeyEnum } from '@fastgpt/global/core/chat/constants';
import { ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
import { sseResponseEventEnum } from '@fastgpt/service/common/response/constant';
import { textAdaptGptResponse } from '@/utils/adapt';
import { ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';
type Props = ModuleDispatchProps<{
userChatInput: string;
@@ -14,9 +15,9 @@ type Props = ModuleDispatchProps<{
app: SelectAppItemType;
}>;
type Response = {
[TaskResponseKeyEnum.responseData]: moduleDispatchResType[];
[TaskResponseKeyEnum.answerText]: string;
[TaskResponseKeyEnum.history]: ChatItemType[];
[ModuleOutputKeyEnum.responseData]: moduleDispatchResType[];
[ModuleOutputKeyEnum.answerText]: string;
[ModuleOutputKeyEnum.history]: ChatItemType[];
};
export const dispatchAppRequest = async (props: Props): Promise<Response> => {
@@ -53,6 +54,7 @@ export const dispatchAppRequest = async (props: Props): Promise<Response> => {
const { responseData, answerText } = await dispatchModules({
...props,
appId: app.id,
modules: appData.modules,
params: {
history,
@@ -73,7 +75,7 @@ export const dispatchAppRequest = async (props: Props): Promise<Response> => {
return {
responseData,
[TaskResponseKeyEnum.answerText]: answerText,
[TaskResponseKeyEnum.history]: completeMessages
answerText: answerText,
history: completeMessages
};
};

View File

@@ -4,7 +4,7 @@ import { initPg } from '@fastgpt/service/common/pg';
import { MongoUser } from '@fastgpt/service/support/user/schema';
import { connectMongo } from '@fastgpt/service/common/mongo/init';
import { hashStr } from '@fastgpt/global/common/string/tools';
import { getInitConfig, initGlobal } from '@/pages/api/system/getInitData';
import { getInitConfig } from '@/pages/api/system/getInitData';
import { createDefaultTeam } from '@fastgpt/service/support/user/team/controller';
import { exit } from 'process';
@@ -14,7 +14,6 @@ import { exit } from 'process';
export function connectToDatabase(): Promise<void> {
return connectMongo({
beforeHook: () => {
initGlobal();
getInitConfig();
},
afterHook: () => {