Mirror of https://github.com/labring/FastGPT.git (synced 2025-07-23 05:12:39 +00:00)
Fix 4.8 node (#1370)
* perf: runtime props
* fix: Plugin run failed in debug mode
* perf: variable update
* fix: ts
* perf: variable ui
@@ -25,6 +25,7 @@ import {
 } from '../../../../common/string/tiktoken/index';
 import {
   chats2GPTMessages,
+  chatValue2RuntimePrompt,
   getSystemPrompt,
   GPTMessages2Chats,
   runtimePrompt2ChatsValue
@@ -66,7 +67,7 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
   user,
   histories,
   node: { name },
-  inputFiles = [],
+  query,
   params: {
     model,
     temperature = 0,
@@ -80,6 +81,8 @@ export const dispatchChatCompletion = async (props: ChatProps): Promise<ChatResp
       quotePrompt
     }
   } = props;
+  const { files: inputFiles } = chatValue2RuntimePrompt(query);
+
   if (!userChatInput && inputFiles.length === 0) {
     return Promise.reject('Question is empty');
   }
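For context, the change above stops passing a separate inputFiles prop and instead derives the files from the incoming query value. Below is a minimal sketch of what a chatValue2RuntimePrompt-style splitter does, using simplified stand-in types (the real ChatItemValueItemType lives in @fastgpt/global and is richer than this):

// Simplified stand-ins for the real chat value item types (assumptions, for illustration only).
type SketchFile = { type: 'file' | 'image'; name: string; url: string };
type SketchValueItem =
  | { type: 'text'; text: { content: string } }
  | { type: 'file'; file: SketchFile };

// A chatValue2RuntimePrompt-style splitter: concatenate the text items and collect the files,
// so a node only needs the original query value to recover both.
const splitRuntimePrompt = (value: SketchValueItem[]) => {
  const text = value
    .filter((item): item is Extract<SketchValueItem, { type: 'text' }> => item.type === 'text')
    .map((item) => item.text.content)
    .join('\n');
  const files = value
    .filter((item): item is Extract<SketchValueItem, { type: 'file' }> => item.type === 'file')
    .map((item) => item.file);
  return { text, files };
};

// Usage: derive the files from the query value instead of threading a separate inputFiles prop.
const { text: userChatInput, files: inputFiles } = splitRuntimePrompt([
  { type: 'text', text: { content: 'Summarize this document' } },
  { type: 'file', file: { type: 'file', name: 'doc.pdf', url: 'https://example.com/doc.pdf' } }
]);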
@@ -289,7 +289,8 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
       node,
       runtimeNodes,
       runtimeEdges,
-      params
+      params,
+      mode: 'test'
     };

     // run module
@@ -357,7 +358,7 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
   if (pluginOutputModule && props.mode !== 'debug') {
     await nodeRunWithActive(pluginOutputModule);
   }
-  const { userChatInput, ...leftVariables } = variables;
+
   return {
     flowResponses: chatResponses,
     flowUsages: chatNodeUsages,
@@ -369,7 +370,7 @@ export async function dispatchWorkFlow(data: Props): Promise<DispatchFlowRespons
     [DispatchNodeResponseKeyEnum.assistantResponses]:
       mergeAssistantResponseAnswerText(chatAssistantResponse),
     [DispatchNodeResponseKeyEnum.toolResponses]: toolRunResponse,
-    newVariables: leftVariables
+    newVariables: removeSystemVariable(variables)
   };
 }
@@ -407,6 +408,18 @@ export function getSystemVariable({
   };
 }

+/* remove system variable */
+const removeSystemVariable = (variables: Record<string, any>) => {
+  const copyVariables = { ...variables };
+  delete copyVariables.appId;
+  delete copyVariables.chatId;
+  delete copyVariables.responseChatItemId;
+  delete copyVariables.histories;
+  delete copyVariables.cTime;
+
+  return copyVariables;
+};
+
 /* Merge consecutive text messages into one */
 export const mergeAssistantResponseAnswerText = (response: AIChatItemValueItemType[]) => {
   const result: AIChatItemValueItemType[] = [];
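The new removeSystemVariable helper appears in full above. As a standalone illustration of the same idea (the key list is taken from the diff; the helper name below is hypothetical):

// Strip runtime-only keys before returning user variables as newVariables.
const SYSTEM_VARIABLE_KEYS = ['appId', 'chatId', 'responseChatItemId', 'histories', 'cTime'];

const stripSystemVariables = (variables: Record<string, any>): Record<string, any> =>
  Object.fromEntries(
    Object.entries(variables).filter(([key]) => !SYSTEM_VARIABLE_KEYS.includes(key))
  );

// Usage: only user-defined variables survive.
const newVariables = stripSystemVariables({
  appId: 'app-1',
  chatId: 'chat-1',
  cTime: '2024-05-07 10:00:00',
  customerName: 'Alice' // user-defined, so it is kept
});
// -> { customerName: 'Alice' }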
@@ -1,15 +1,19 @@
+import { chatValue2RuntimePrompt } from '@fastgpt/global/core/chat/adapt';
+import { UserChatItemValueItemType } from '@fastgpt/global/core/chat/type';
 import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
 import type { ModuleDispatchProps } from '@fastgpt/global/core/workflow/type/index.d';
 export type UserChatInputProps = ModuleDispatchProps<{
   [NodeInputKeyEnum.userChatInput]: string;
+  [NodeInputKeyEnum.inputFiles]: UserChatItemValueItemType['file'][];
 }>;

 export const dispatchWorkflowStart = (props: Record<string, any>) => {
-  const {
-    variables: { userChatInput },
-    params: { userChatInput: query }
-  } = props as UserChatInputProps;
+  const { query } = props as UserChatInputProps;
+
+  const { text, files } = chatValue2RuntimePrompt(query);

   return {
-    userChatInput: query || userChatInput
+    [NodeInputKeyEnum.userChatInput]: text,
+    [NodeInputKeyEnum.inputFiles]: files
   };
 };
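The start node now returns its outputs under enum-keyed names instead of a single merged userChatInput string, so downstream nodes can reference the text and the files separately. A small sketch of that computed-key pattern with an assumed enum (the real NodeInputKeyEnum is defined in @fastgpt/global/core/workflow/constants):

// Assumed enum, for illustration only.
enum SketchNodeInputKeyEnum {
  userChatInput = 'userChatInput',
  inputFiles = 'inputFiles'
}

// Expose both outputs under stable keys so other nodes can pick what they need.
const startNodeOutput = (text: string, files: { name: string; url: string }[]) => ({
  [SketchNodeInputKeyEnum.userChatInput]: text,
  [SketchNodeInputKeyEnum.inputFiles]: files
});

// Usage
const out = startNodeOutput('Hello', [{ name: 'doc.pdf', url: 'https://example.com/doc.pdf' }]);
// out.userChatInput === 'Hello', out.inputFiles.length === 1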
@@ -1,7 +1,7 @@
 import type { ModuleDispatchProps } from '@fastgpt/global/core/workflow/type/index.d';
 import { dispatchWorkFlow } from '../index';
 import { FlowNodeTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
-import { NodeInputKeyEnum, WorkflowIOValueTypeEnum } from '@fastgpt/global/core/workflow/constants';
+import { NodeInputKeyEnum } from '@fastgpt/global/core/workflow/constants';
 import { DispatchNodeResponseKeyEnum } from '@fastgpt/global/core/workflow/runtime/constants';
 import { getPluginRuntimeById } from '../../../plugin/controller';
 import { authPluginCanUse } from '../../../../support/permission/auth/plugin';
@@ -35,7 +35,7 @@ export const dispatchAppRequest = async (props: Props): Promise<Response> => {
     stream,
     detail,
     histories,
-    inputFiles,
+    query,
     params: { userChatInput, history, app }
   } = props;
   let start = Date.now();
@@ -71,7 +71,7 @@ export const dispatchAppRequest = async (props: Props): Promise<Response> => {
     runtimeNodes: storeNodes2RuntimeNodes(appData.modules, getDefaultEntryNodeIds(appData.modules)),
     runtimeEdges: initWorkflowEdgeStatus(appData.edges),
     histories: chatHistories,
-    inputFiles,
+    query,
     variables: {
       ...props.variables,
       userChatInput
@@ -81,10 +81,7 @@ export const dispatchAppRequest = async (props: Props): Promise<Response> => {
   const completeMessages = chatHistories.concat([
     {
       obj: ChatRoleEnum.Human,
-      value: runtimePrompt2ChatsValue({
-        files: inputFiles,
-        text: userChatInput
-      })
+      value: query
     },
     {
       obj: ChatRoleEnum.AI,
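Because query is already a chat value array, the Human history entry can reuse it directly rather than being rebuilt from separate text and file props via runtimePrompt2ChatsValue. A rough sketch under simplified, assumed types:

// Simplified stand-ins; ChatRoleEnum and the real chat item types in FastGPT are richer.
type SketchChatValueItem = { type: 'text' | 'file'; [key: string]: any };
type SketchChatItem = { obj: 'Human' | 'AI'; value: SketchChatValueItem[] };

// The Human turn takes the original query value as-is; the AI turn gets the answer slot.
const buildCompleteMessages = (
  histories: SketchChatItem[],
  query: SketchChatValueItem[],
  answer: SketchChatValueItem[]
): SketchChatItem[] =>
  histories.concat([
    { obj: 'Human', value: query },
    { obj: 'AI', value: answer }
  ]);

// Usage
const completeMessages = buildCompleteMessages(
  [],
  [{ type: 'text', text: { content: 'Hi' } }],
  [{ type: 'text', text: { content: '' } }]
);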
@@ -4,6 +4,7 @@ import { DispatchNodeResultType } from '@fastgpt/global/core/workflow/runtime/ty
 import { getReferenceVariableValue } from '@fastgpt/global/core/workflow/runtime/utils';
 import { TUpdateListItem } from '@fastgpt/global/core/workflow/template/system/variableUpdate/type';
 import { ModuleDispatchProps } from '@fastgpt/global/core/workflow/type';
+import { valueTypeFormat } from '../utils';

 type Props = ModuleDispatchProps<{
   [NodeInputKeyEnum.updateList]: TUpdateListItem[];
@@ -22,26 +23,31 @@ export const dispatchUpdateVariable = async (
     if (!varNodeId || !varKey) {
       return;
     }
-    let value = '';
-    if (!item.value?.[0]) {
-      value = item.value?.[1];
-    } else {
-      value = getReferenceVariableValue({
-        value: item.value,
-        variables,
-        nodes: runtimeNodes
-      });
-    }
+
+    const value = (() => {
+      if (!item.value?.[0]) {
+        return valueTypeFormat(item.value?.[1], item.valueType);
+      } else {
+        return getReferenceVariableValue({
+          value: item.value,
+          variables,
+          nodes: runtimeNodes
+        });
+      }
+    })();

     if (varNodeId === VARIABLE_NODE_ID) {
       // update global variable
       variables[varKey] = value;
     } else {
-      const node = runtimeNodes.find((node) => node.nodeId === varNodeId);
-      if (node) {
-        const output = node.outputs.find((output) => output.id === varKey);
-        if (output) {
-          output.value = value;
-        }
-      }
+      runtimeNodes
+        .find((node) => node.nodeId === varNodeId)
+        ?.outputs?.find((output) => {
+          if (output.id === varKey) {
+            output.value = value;
+            return true;
+          }
+        });
     }
   });
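The updated node computes the value once in an IIFE and runs constants through valueTypeFormat so they match the declared valueType. Below is a self-contained sketch of that pattern; sketchValueTypeFormat and resolveUpdateValue are hypothetical simplifications, not the actual dispatch utils:

// Hypothetical simplifications of the real helpers, for illustration only.
type SketchValueType = 'string' | 'number' | 'boolean';

// A valueTypeFormat-style coercion: make a constant match its declared value type.
const sketchValueTypeFormat = (value: any, type?: SketchValueType) => {
  if (type === 'number') return Number(value);
  if (type === 'boolean') return value === 'true' || value === true;
  if (type === 'string' && typeof value !== 'string') return JSON.stringify(value);
  return value;
};

// item.value is treated as a [nodeId, key] reference when the first element is set;
// otherwise the second element is a literal constant that still needs coercion.
const resolveUpdateValue = (
  item: { value?: [string | undefined, any]; valueType?: SketchValueType },
  lookupReference: (ref: [string, any]) => any
) =>
  (() => {
    if (!item.value?.[0]) {
      return sketchValueTypeFormat(item.value?.[1], item.valueType);
    }
    return lookupReference(item.value as [string, any]);
  })();

// Usage: a constant '3' declared as a number becomes 3; references go through the lookup.
const n = resolveUpdateValue({ value: [undefined, '3'], valueType: 'number' }, () => undefined);
// n === 3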