V4.9.5 feature (#4520)

* readme

* Add queue log

* Test interactive (#4509)

* Support nested node interaction (#4503)

* feat: Add a new InteractiveContext type and update InteractiveBasicType, adding an optional context property to support more complex interaction state management.

* feat: Enhance workflow interactivity by adding InteractiveContext support and updating dispatch logic to manage nested contexts and entry nodes more effectively.

* feat: Refactor dispatchWorkFlow to utilize InteractiveContext for improved context management

* feat: Enhance entry node resolution by adding validation for entryNodeIds and recursive search in InteractiveContext

* feat: Remove workflowDepth from InteractiveContext and update recovery logic to utilize parentContext for improved context management

* feat: Update getWorkflowEntryNodeIds to use lastInteractive for improved context handling in runtime nodes

* feat: Add lastInteractive support to enhance context management across workflow components

* feat: Enhance interactive workflow by adding stopForInteractive flag and improving memory edge validation in runtime logic

* feat: Refactor InteractiveContext by removing interactiveAppId and updating runtime edge handling in dispatchRunApp for improved context management

* feat: Simplify runtime node and edge initialization in dispatchRunApp by using ternary operators for improved readability and maintainability

* feat: Improve memory edge validation in initWorkflowEdgeStatus by adding detailed comments for better understanding of subset checks and recursive context searching

* feat: Remove commented-out current level information from InteractiveContext for cleaner code and improved readability

* feat: Simplify stopForInteractive check in dispatchWorkFlow for improved code clarity and maintainability

* feat: Remove stopForInteractive handling and related references for improved code clarity and maintainability

* feat: Add interactive response handling in dispatchRunAppNode for enhanced workflow interactivity

* feat: Add context property to InteractiveBasicType and InteractiveNodeType for improved interactivity management

* feat: remove comments

* feat: Remove the node property from ChatDispatchProps to simplify type definitions

* feat: Remove workflowInteractiveResponse from dispatchRunAppNode for cleaner code

* feat: Refactor interactive value handling in chat history processing for improved clarity

* feat: Simplify initWorkflowEdgeStatus logic for better readability and maintainability

* feat: Add workflowInteractiveResponse to dispatchWorkFlow for enhanced functionality

* feat: Enhance interactive response handling with nested children support

* feat: Remove commented-out code for interactive node handling to improve clarity

* feat: remove InteractiveContext type

* feat: Refactor UserSelectInteractive and UserInputInteractive params for improved structure and clarity

* feat: remove

* feat: The front end supports extracting the deepest interaction parameters to enhance interaction processing

* feat: The front end supports extracting the deepest interaction parameters to enhance interaction processing

* fix: handle undefined interactive values in runtimeEdges and runtimeNodes initialization

* fix: handle undefined interactive values in runtimeNodes and runtimeEdges initialization

* fix: update runtimeNodes and runtimeEdges initialization to use last interactive value

* fix: remove unused imports and replace getLastInteractiveValue with lastInteractive in runtimeEdges initialization

* fix: import WorkflowInteractiveResponseType and handle lastInteractive as undefined in chatTest

* feat: implement extractDeepestInteractive function and refactor usage in AIResponseBox and ChatBox utils

* fix: refactor initWorkflowEdgeStatus and getWorkflowEntryNodeIds calls in dispatchRunAppNode for recovery handling

* fix: ensure lastInteractive is handled consistently as undefined in runtimeEdges and runtimeNodes initialization

* fix: update dispatchFormInput and dispatchUserSelect to use lastInteractive consistently

* fix: update condition checks in dispatchFormInput and dispatchUserSelect to ensure lastInteractive type is validated correctly

* fix: refactor dispatchRunAppNode to replace isRecovery with childrenInteractive for improved clarity in runtimeNodes and runtimeEdges initialization

* refactor: streamline runtimeNodes and runtimeEdges initialization in dispatchRunAppNode for improved readability and maintainability

* fix: update rewriteNodeOutputByHistories function to accept runtimeNodes and interactive as parameters for improved clarity

* fix: simplify interactiveResponse assignment in dispatchWorkFlow for improved clarity

* fix: update entryNodeIds check in getWorkflowEntryNodeIds to ensure it's an array for improved reliability

* remove some invalid code

---------

Co-authored-by: Theresa <63280168+sd0ric4@users.noreply.github.com>

* update doc

* update log

* fix: update debug workflow to conditionally include nextStepSkipNodes… (#4511)

* fix: update debug workflow to conditionally include nextStepSkipNodes based on lastInteractive for improved debugging accuracy

* fix: type error

* remove invalid code

* fix: QA queue

* fix: interactive

* Test log (#4519)

* add log (#4504)

* add log

* update log i18n

* update log

* delete template

* add i18nT

* add team operation log

---------

Co-authored-by: gggaaallleee <91131304+gggaaallleee@users.noreply.github.com>

* remove search

* update doc

---------

Co-authored-by: Theresa <63280168+sd0ric4@users.noreply.github.com>
Co-authored-by: gggaaallleee <91131304+gggaaallleee@users.noreply.github.com>
commit 16a22bc76a (parent b51a87f5b7)
Author: Archer
Committed by: GitHub
Date: 2025-04-12 12:48:19 +08:00
34 changed files with 661 additions and 203 deletions
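
The nested-interaction commits above hinge on a recursive interactive-response shape: an app/plugin child workflow can itself pause on an interactive node, so the stored response may wrap another response together with the state needed to resume (entry nodes, edge snapshot, node outputs). A minimal sketch of that shape follows; the wrapper type name and the `childrenResponse` field are assumptions for illustration, not taken from this diff.

// Sketch only: the 'childrenInteractive' wrapper and `childrenResponse` field are assumptions.
type InteractiveBasicType = {
  entryNodeIds: string[]; // nodes to re-enter when the paused workflow resumes
  memoryEdges: { source: string; target: string; status: string }[]; // edge-status snapshot
  nodeOutputs: { nodeId: string; key: string; value?: any }[]; // outputs to restore on resume
};

type UserSelectInteractive = InteractiveBasicType & {
  type: 'userSelect';
  params: {
    description: string;
    userSelectOptions: { key: string; value: string }[];
    userSelectedVal?: string;
  };
};

type UserInputInteractive = InteractiveBasicType & {
  type: 'userInput';
  params: {
    description: string;
    inputForm: Record<string, any>[];
    submitted?: boolean;
  };
};

// A nested app node that paused inside its child workflow wraps the child's response.
type ChildrenInteractive = InteractiveBasicType & {
  type: 'childrenInteractive';
  params: { childrenResponse: WorkflowInteractiveResponseType };
};

type WorkflowInteractiveResponseType =
  | UserSelectInteractive
  | UserInputInteractive
  | ChildrenInteractive;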

View File

@@ -6,6 +6,7 @@ import {
import { ChatBoxInputType, UserInputFileItemType } from './type';
import { getFileIcon } from '@fastgpt/global/common/file/icon';
import { ChatItemValueTypeEnum, ChatStatusEnum } from '@fastgpt/global/core/chat/constants';
import { extractDeepestInteractive } from '@fastgpt/global/core/workflow/runtime/utils';
export const formatChatValue2InputType = (value?: ChatItemValueItemType[]): ChatBoxInputType => {
if (!value) {
@@ -82,17 +83,19 @@ export const setUserSelectResultToHistories = (
i !== item.value.length - 1 ||
val.type !== ChatItemValueTypeEnum.interactive ||
!val.interactive
)
) {
return val;
}
if (val.interactive.type === 'userSelect') {
const finalInteractive = extractDeepestInteractive(val.interactive);
if (finalInteractive.type === 'userSelect') {
return {
...val,
interactive: {
...val.interactive,
...finalInteractive,
params: {
...val.interactive.params,
userSelectedVal: val.interactive.params.userSelectOptions.find(
...finalInteractive.params,
userSelectedVal: finalInteractive.params.userSelectOptions.find(
(item) => item.value === interactiveVal
)?.value
}
@@ -100,13 +103,13 @@ export const setUserSelectResultToHistories = (
};
}
if (val.interactive.type === 'userInput') {
if (finalInteractive.type === 'userInput') {
return {
...val,
interactive: {
...val.interactive,
...finalInteractive,
params: {
...val.interactive.params,
...finalInteractive.params,
submitted: true
}
}
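
This hunk swaps direct `val.interactive` access for `extractDeepestInteractive`, so the chat UI always operates on the innermost response when an interaction was produced inside a nested app node. A plausible sketch of the helper, assuming nested responses are wrapped under a `childrenResponse` param as in the type sketch above:

// Assuming the type lives next to UserInputInteractive (path taken from an import later in this diff).
import type { WorkflowInteractiveResponseType } from '@fastgpt/global/core/workflow/template/system/interactive/type';

// Sketch: unwrap nested interactive responses until the innermost one is reached.
// The 'childrenInteractive' type and params.childrenResponse field are assumptions.
export const extractDeepestInteractive = (
  interactive: WorkflowInteractiveResponseType
): WorkflowInteractiveResponseType => {
  let current = interactive;
  while (current?.type === 'childrenInteractive' && current.params?.childrenResponse) {
    current = current.params.childrenResponse;
  }
  return current;
};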

View File

@@ -28,6 +28,7 @@ import { isEqual } from 'lodash';
import { useTranslation } from 'next-i18next';
import { eventBus, EventNameEnum } from '@/web/common/utils/eventbus';
import { SelectOptionsComponent, FormInputComponent } from './Interactive/InteractiveComponents';
import { extractDeepestInteractive } from '@fastgpt/global/core/workflow/runtime/utils';
const accordionButtonStyle = {
w: 'auto',
@@ -245,11 +246,12 @@ const AIResponseBox = ({
return <RenderTool showAnimation={isChatting} tools={value.tools} />;
}
if (value.type === ChatItemValueTypeEnum.interactive && value.interactive) {
if (value.interactive.type === 'userSelect') {
return <RenderUserSelectInteractive interactive={value.interactive} />;
const finalInteractive = extractDeepestInteractive(value.interactive);
if (finalInteractive.type === 'userSelect') {
return <RenderUserSelectInteractive interactive={finalInteractive} />;
}
if (value.interactive?.type === 'userInput') {
return <RenderUserFormInteractive interactive={value.interactive} />;
if (finalInteractive.type === 'userInput') {
return <RenderUserFormInteractive interactive={finalInteractive} />;
}
}
return null;

View File

@@ -303,7 +303,7 @@ function MemberTable({ Tabs }: { Tabs: React.ReactNode }) {
})()}
</Td>
<Td maxW={'300px'}>
<VStack gap={0}>
<VStack gap={0} align="start">
<Box>{format(new Date(member.createTime), 'yyyy-MM-dd HH:mm:ss')}</Box>
<Box>
{member.updateTime

View File

@@ -0,0 +1,102 @@
import {
Box,
Button,
Flex,
Table,
TableContainer,
Tbody,
Td,
Th,
Thead,
Tr
} from '@chakra-ui/react';
import { useState } from 'react';
import { useTranslation } from 'next-i18next';
import MyBox from '@fastgpt/web/components/common/MyBox';
import SearchInput from '@fastgpt/web/components/common/Input/SearchInput';
import { useScrollPagination } from '@fastgpt/web/hooks/useScrollPagination';
import { getOperationLogs } from '@/web/support/user/team/operantionLog/api';
import { TeamPermission } from '@fastgpt/global/support/permission/user/controller';
import { operationLogI18nMap } from '@fastgpt/service/support/operationLog/constants';
import { OperationLogEventEnum } from '@fastgpt/global/support/operationLog/constants';
import { formatTime2YMDHMS } from '@fastgpt/global/common/string/time';
import UserBox from '@fastgpt/web/components/common/UserBox';
function OperationLogTable({ Tabs }: { Tabs: React.ReactNode }) {
const { t } = useTranslation();
const [searchKey, setSearchKey] = useState<string>('');
const {
data: operationLogs = [],
isLoading: loadingLogs,
ScrollData: LogScrollData
} = useScrollPagination(getOperationLogs, {
pageSize: 20,
refreshDeps: [searchKey],
throttleWait: 500,
debounceWait: 200
});
const isLoading = loadingLogs;
return (
<>
<Flex justify={'space-between'} align={'center'} pb={'1rem'}>
{Tabs}
</Flex>
<MyBox isLoading={isLoading} flex={'1 0 0'} overflow={'auto'}>
<LogScrollData>
<TableContainer overflow={'unset'} fontSize={'sm'}>
<Table overflow={'unset'}>
<Thead>
<Tr bgColor={'white !important'}>
<Th borderLeftRadius="6px" bgColor="myGray.100">
{t('account_team:log_user')}
</Th>
<Th bgColor="myGray.100">{t('account_team:log_time')}</Th>
<Th bgColor="myGray.100">{t('account_team:log_type')}</Th>
<Th bgColor="myGray.100">{t('account_team:log_details')}</Th>
</Tr>
</Thead>
<Tbody>
{operationLogs?.map((log) => {
const i18nData = operationLogI18nMap[log.event];
const metadata = { ...log.metadata };
if (log.event === OperationLogEventEnum.ASSIGN_PERMISSION) {
const permissionValue = parseInt(metadata.permission, 10);
const permission = new TeamPermission({ per: permissionValue });
metadata.appCreate = permission.hasAppCreatePer ? '✔' : '✘';
metadata.datasetCreate = permission.hasDatasetCreatePer ? '✔' : '✘';
metadata.apiKeyCreate = permission.hasApikeyCreatePer ? '✔' : '✘';
metadata.manage = permission.hasManagePer ? '✔' : '✘';
}
return i18nData ? (
<Tr key={log._id} overflow={'unset'}>
<Td>
<UserBox
sourceMember={log.sourceMember}
fontSize="sm"
avatarSize="1rem"
spacing={0.5}
/>
</Td>
<Td>{formatTime2YMDHMS(log.timestamp)}</Td>
<Td>{t(i18nData.typeLabel)}</Td>
<Td>{t(i18nData.content, metadata as any) as string}</Td>
</Tr>
) : null;
})}
</Tbody>
</Table>
</TableContainer>
</LogScrollData>
</MyBox>
</>
);
}
export default OperationLogTable;
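
The table resolves each event through `operationLogI18nMap` and renders `t(i18nData.typeLabel)` plus `t(i18nData.content, metadata)`, so the map presumably pairs every `OperationLogEventEnum` value with an i18n label key and a templated content key. A hypothetical entry shape (the keys below are invented for illustration; the real map lives in `@fastgpt/service/support/operationLog/constants`):

// Hypothetical shape and keys, for illustration only.
type OperationLogI18nItem = {
  typeLabel: string; // rendered via t(i18nData.typeLabel) in the log type column
  content: string; // rendered via t(i18nData.content, metadata) in the details column
};

export const operationLogI18nMapSketch: Record<string, OperationLogI18nItem> = {
  LOGIN: {
    typeLabel: 'account_team:log_type_login',
    content: 'account_team:log_login_content'
  },
  ASSIGN_PERMISSION: {
    typeLabel: 'account_team:log_type_assign_permission',
    // The content template consumes the metadata fields built in the component above,
    // e.g. appCreate / datasetCreate / apiKeyCreate / manage.
    content: 'account_team:log_assign_permission_content'
  }
};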

View File

@@ -13,7 +13,10 @@ import {
SelectOptionsComponent
} from '@/components/core/chat/components/Interactive/InteractiveComponents';
import { UserInputInteractive } from '@fastgpt/global/core/workflow/template/system/interactive/type';
import { initWorkflowEdgeStatus } from '@fastgpt/global/core/workflow/runtime/utils';
import {
getLastInteractiveValue,
initWorkflowEdgeStatus
} from '@fastgpt/global/core/workflow/runtime/utils';
import { ChatItemType, UserChatItemValueItemType } from '@fastgpt/global/core/chat/type';
import { ChatItemValueTypeEnum, ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
@@ -130,10 +133,11 @@ const NodeDebugResponse = ({ nodeId, debugResult }: NodeDebugResponseProps) => {
}
];
const lastInteractive = getLastInteractiveValue(mockHistory);
onNextNodeDebug({
...workflowDebugData,
// Rewrite runtimeEdges
runtimeEdges: initWorkflowEdgeStatus(workflowDebugData.runtimeEdges, mockHistory),
runtimeEdges: initWorkflowEdgeStatus(workflowDebugData.runtimeEdges, lastInteractive),
query: updatedQuery,
history: mockHistory
});
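
The debug panel now derives `lastInteractive` from the mock history and passes it to `initWorkflowEdgeStatus` instead of the full history. A sketch of what the updated helper likely does with that value, using minimal stand-in types (`memoryEdges` follows the interactive type sketch earlier and is an assumption):

// Minimal stand-in types for the sketch.
type StoreEdgeItemType = { source: string; target: string; sourceHandle: string; targetHandle: string };
type RuntimeEdgeItemType = StoreEdgeItemType & { status: 'waiting' | 'active' | 'skipped' };

// Sketch: resume from the edge snapshot memorized on the last interactive response, if any.
export const initWorkflowEdgeStatus = (
  edges: StoreEdgeItemType[],
  lastInteractive?: { memoryEdges?: RuntimeEdgeItemType[] }
): RuntimeEdgeItemType[] => {
  const memoryEdges = lastInteractive?.memoryEdges;
  if (memoryEdges && memoryEdges.length > 0) {
    return memoryEdges; // pick up exactly where the paused run left off
  }
  // Fresh run: every edge starts in the default waiting state.
  return edges.map((edge) => ({ ...edge, status: 'waiting' as const }));
};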

View File

@@ -18,6 +18,7 @@ const MemberTable = dynamic(() => import('@/pageComponents/account/team/MemberTa
const PermissionManage = dynamic(
() => import('@/pageComponents/account/team/PermissionManage/index')
);
const OperationLogTable = dynamic(() => import('@/pageComponents/account/team/OperationLog/index'));
const GroupManage = dynamic(() => import('@/pageComponents/account/team/GroupManage/index'));
const OrgManage = dynamic(() => import('@/pageComponents/account/team/OrgManage/index'));
const HandleInviteModal = dynamic(
@@ -28,7 +29,8 @@ export enum TeamTabEnum {
member = 'member',
org = 'org',
group = 'group',
permission = 'permission'
permission = 'permission',
operationLog = 'operationLog'
}
const Team = () => {
@@ -57,7 +59,8 @@ const Team = () => {
{ label: t('account_team:member'), value: TeamTabEnum.member },
{ label: t('account_team:org'), value: TeamTabEnum.org },
{ label: t('account_team:group'), value: TeamTabEnum.group },
{ label: t('account_team:permission'), value: TeamTabEnum.permission }
{ label: t('account_team:permission'), value: TeamTabEnum.permission },
{ label: t('account_team:operation_log'), value: TeamTabEnum.operationLog }
]}
px={'1rem'}
value={teamTab}
@@ -150,6 +153,7 @@ const Team = () => {
{teamTab === TeamTabEnum.org && <OrgManage Tabs={Tabs} />}
{teamTab === TeamTabEnum.group && <GroupManage Tabs={Tabs} />}
{teamTab === TeamTabEnum.permission && <PermissionManage Tabs={Tabs} />}
{teamTab === TeamTabEnum.operationLog && <OperationLogTable Tabs={Tabs} />}
</Box>
</Flex>
{invitelinkid && <HandleInviteModal invitelinkid={invitelinkid} />}

View File

@@ -98,7 +98,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
const isPlugin = app.type === AppTypeEnum.plugin;
const userQuestion: UserChatItemType = (() => {
const userQuestion: UserChatItemType = await (async () => {
if (isPlugin) {
return getPluginRunUserQuery({
pluginInputs: getPluginInputsFromStoreNodes(app.modules),
@@ -107,9 +107,9 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
});
}
const latestHumanChat = chatMessages.pop() as UserChatItemType | undefined;
const latestHumanChat = chatMessages.pop() as UserChatItemType;
if (!latestHumanChat) {
throw new Error('User question is empty');
return Promise.reject('User question is empty');
}
return latestHumanChat;
})();
@@ -136,14 +136,14 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
}
const newHistories = concatHistories(histories, chatMessages);
const interactive = getLastInteractiveValue(newHistories) || undefined;
// Get runtimeNodes
let runtimeNodes = storeNodes2RuntimeNodes(nodes, getWorkflowEntryNodeIds(nodes, newHistories));
let runtimeNodes = storeNodes2RuntimeNodes(nodes, getWorkflowEntryNodeIds(nodes, interactive));
if (isPlugin) {
runtimeNodes = updatePluginInputByVariables(runtimeNodes, variables);
variables = {};
}
runtimeNodes = rewriteNodeOutputByHistories(newHistories, runtimeNodes);
runtimeNodes = rewriteNodeOutputByHistories(runtimeNodes, interactive);
const workflowResponseWrite = getWorkflowResponseWrite({
res,
@@ -175,9 +175,10 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
chatId,
responseChatItemId,
runtimeNodes,
runtimeEdges: initWorkflowEdgeStatus(edges, newHistories),
runtimeEdges: initWorkflowEdgeStatus(edges, interactive),
variables,
query: removeEmptyUserInput(userQuestion.value),
lastInteractive: interactive,
chatConfig,
histories: newHistories,
stream: true,
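
Entry-node resolution and output rewriting now key off the last interactive value rather than the chat history. A sketch of `getWorkflowEntryNodeIds` under that assumption (the node type names in the fallback are illustrative):

// Sketch: resume from the entry nodes memorized on the interactive response when present.
type RuntimeNodeLike = { nodeId: string; flowNodeType: string };

export const getWorkflowEntryNodeIds = (
  nodes: RuntimeNodeLike[],
  lastInteractive?: { entryNodeIds?: string[] }
): string[] => {
  // Validated as an array, per the "ensure it's an array" fix in the commit list.
  const memorizedEntryIds = lastInteractive?.entryNodeIds;
  if (Array.isArray(memorizedEntryIds) && memorizedEntryIds.length > 0) {
    return memorizedEntryIds;
  }
  // Fall back to the workflow's static entry nodes (type names illustrative).
  const entryNodeTypes = ['workflowStart', 'pluginInput', 'systemConfig'];
  return nodes
    .filter((node) => entryNodeTypes.includes(node.flowNodeType))
    .map((node) => node.nodeId);
};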

View File

@@ -10,6 +10,7 @@ import { NextAPI } from '@/service/middleware/entry';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { defaultApp } from '@/web/core/app/constants';
import { WORKFLOW_MAX_RUN_TIMES } from '@fastgpt/service/core/workflow/constants';
import { getLastInteractiveValue } from '@fastgpt/global/core/workflow/runtime/utils';
async function handler(
req: NextApiRequest,
@@ -44,6 +45,7 @@ async function handler(
// auth balance
const { timezone, externalProvider } = await getUserChatInfoAndAuthTeamPoints(tmbId);
const lastInteractive = getLastInteractiveValue(history);
/* start process */
const { flowUsages, flowResponses, debugResponse, newVariables, workflowInteractiveResponse } =
@@ -65,6 +67,7 @@ async function handler(
},
runtimeNodes: nodes,
runtimeEdges: edges,
lastInteractive,
variables,
query: query,
chatConfig: defaultApp.chatConfig,

View File

@@ -9,6 +9,8 @@ import { useIPFrequencyLimit } from '@fastgpt/service/common/middle/reqFrequency
import { pushTrack } from '@fastgpt/service/common/middle/tracks/utils';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
import { UserErrEnum } from '@fastgpt/global/common/error/code/user';
import { addOperationLog } from '@fastgpt/service/support/operationLog/addOperationLog';
import { OperationLogEventEnum } from '@fastgpt/global/support/operationLog/constants';
async function handler(req: NextApiRequest, res: NextApiResponse) {
const { username, password } = req.body as PostLoginProps;
@@ -64,6 +66,12 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
setCookie(res, token);
addOperationLog({
tmbId: userDetail.team.tmbId,
teamId: userDetail.team.teamId,
event: OperationLogEventEnum.LOGIN
});
return {
user: userDetail,
token
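
The login handler now records an audit entry via `addOperationLog` with the member, team, and event. A plausible fire-and-forget implementation is sketched below; the model name, schema, and the extra `params` field are assumptions:

// Stand-in for the real Mongo model; assumed for the sketch.
declare const MongoOperationLog: { create: (doc: Record<string, any>) => Promise<unknown> };

type AddOperationLogProps = {
  tmbId: string;
  teamId: string;
  event: string; // an OperationLogEventEnum value, e.g. LOGIN or ASSIGN_PERMISSION
  params?: Record<string, any>; // metadata later rendered through operationLogI18nMap
};

// Sketch: audit logging should never block or fail the request that triggered it,
// so the write is intentionally not awaited.
export const addOperationLog = ({ tmbId, teamId, event, params }: AddOperationLogProps) => {
  MongoOperationLog.create({
    tmbId,
    teamId,
    event,
    metadata: params,
    timestamp: new Date()
  }).catch((err) => {
    console.warn('addOperationLog failed', err);
  });
};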

View File

@@ -139,7 +139,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
// Computed start hook params
const startHookText = (() => {
// Chat
const userQuestion = chatMessages[chatMessages.length - 1] as UserChatItemType | undefined;
const userQuestion = chatMessages[chatMessages.length - 1] as UserChatItemType;
if (userQuestion) return chatValue2RuntimePrompt(userQuestion.value).text;
// plugin
@@ -245,16 +245,17 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
// Get chat histories
const newHistories = concatHistories(histories, chatMessages);
const interactive = getLastInteractiveValue(newHistories) || undefined;
// Get runtimeNodes
let runtimeNodes = storeNodes2RuntimeNodes(nodes, getWorkflowEntryNodeIds(nodes, newHistories));
let runtimeNodes = storeNodes2RuntimeNodes(nodes, getWorkflowEntryNodeIds(nodes, interactive));
if (isPlugin) {
// Assign values to runtimeNodes using variables
runtimeNodes = updatePluginInputByVariables(runtimeNodes, variables);
// Plugin runtime does not need global variables(It has been injected into the pluginInputNode)
variables = {};
}
runtimeNodes = rewriteNodeOutputByHistories(newHistories, runtimeNodes);
runtimeNodes = rewriteNodeOutputByHistories(runtimeNodes, interactive);
const workflowResponseWrite = getWorkflowResponseWrite({
res,
@@ -288,7 +289,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
chatId,
responseChatItemId,
runtimeNodes,
runtimeEdges: initWorkflowEdgeStatus(edges, newHistories),
runtimeEdges: initWorkflowEdgeStatus(edges, interactive),
variables,
query: removeEmptyUserInput(userQuestion.value),
chatConfig,
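
`rewriteNodeOutputByHistories` also changed signature, from `(newHistories, runtimeNodes)` to `(runtimeNodes, interactive)`. A sketch of the likely behavior, writing the memorized outputs back onto the runtime nodes before the workflow resumes (the `nodeOutputs` field follows the earlier type sketch and is an assumption):

// Sketch: restore the node outputs memorized on the last interactive response.
type NodeOutputItem = { nodeId: string; key: string; value?: any };
type RuntimeNodeLike = { nodeId: string; outputs: { key: string; value?: any }[] };

export const rewriteNodeOutputByHistories = (
  runtimeNodes: RuntimeNodeLike[],
  lastInteractive?: { nodeOutputs?: NodeOutputItem[] }
): RuntimeNodeLike[] => {
  const memorizedOutputs = lastInteractive?.nodeOutputs;
  if (!memorizedOutputs || memorizedOutputs.length === 0) return runtimeNodes;

  return runtimeNodes.map((node) => ({
    ...node,
    outputs: node.outputs.map((output) => ({
      ...output,
      value:
        memorizedOutputs.find((item) => item.nodeId === node.nodeId && item.key === output.key)
          ?.value ?? output.value
    }))
  }));
};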

View File

@@ -139,7 +139,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
// Computed start hook params
const startHookText = (() => {
// Chat
const userQuestion = chatMessages[chatMessages.length - 1] as UserChatItemType | undefined;
const userQuestion = chatMessages[chatMessages.length - 1] as UserChatItemType;
if (userQuestion) return chatValue2RuntimePrompt(userQuestion.value).text;
// plugin
@@ -245,16 +245,16 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
// Get chat histories
const newHistories = concatHistories(histories, chatMessages);
const interactive = getLastInteractiveValue(newHistories) || undefined;
// Get runtimeNodes
let runtimeNodes = storeNodes2RuntimeNodes(nodes, getWorkflowEntryNodeIds(nodes, newHistories));
let runtimeNodes = storeNodes2RuntimeNodes(nodes, getWorkflowEntryNodeIds(nodes, interactive));
if (isPlugin) {
// Assign values to runtimeNodes using variables
runtimeNodes = updatePluginInputByVariables(runtimeNodes, variables);
// Plugin runtime does not need global variables(It has been injected into the pluginInputNode)
variables = {};
}
runtimeNodes = rewriteNodeOutputByHistories(newHistories, runtimeNodes);
runtimeNodes = rewriteNodeOutputByHistories(runtimeNodes, interactive);
const workflowResponseWrite = getWorkflowResponseWrite({
res,
@@ -288,9 +288,10 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
chatId,
responseChatItemId,
runtimeNodes,
runtimeEdges: initWorkflowEdgeStatus(edges, newHistories),
runtimeEdges: initWorkflowEdgeStatus(edges, interactive),
variables,
query: removeEmptyUserInput(userQuestion.value),
lastInteractive: interactive,
chatConfig,
histories: newHistories,
stream,

View File

@@ -33,9 +33,21 @@ const reduceQueue = () => {
return global.qaQueueLen === 0;
};
const reduceQueueAndReturn = (delay = 0) => {
reduceQueue();
if (delay) {
setTimeout(() => {
generateQA();
}, delay);
} else {
generateQA();
}
};
export async function generateQA(): Promise<any> {
const max = global.systemEnv?.qaMaxProcess || 10;
addLog.debug(`[QA Queue] Queue size: ${global.qaQueueLen}`);
if (global.qaQueueLen >= max) return;
global.qaQueueLen++;
@@ -98,14 +110,12 @@ export async function generateQA(): Promise<any> {
return;
}
if (error) {
reduceQueue();
return generateQA();
return reduceQueueAndReturn();
}
// auth balance
if (!(await checkTeamAiPointsAndLock(data.teamId))) {
reduceQueue();
return generateQA();
return reduceQueueAndReturn();
}
addLog.info(`[QA Queue] Start`);
@@ -137,14 +147,8 @@ ${replaceVariable(Prompt_AgentQA.fixedText, { text })}`;
const qaArr = formatSplitText({ answer, rawText: text, llmModel: modelData }); // 格式化后的QA对
addLog.info(`[QA Queue] Finish`, {
time: Date.now() - startTime,
splitLength: qaArr.length,
usage: chatResponse.usage
});
// get vector and insert
const { insertLen } = await pushDataListToTrainingQueueByCollectionId({
await pushDataListToTrainingQueueByCollectionId({
teamId: data.teamId,
tmbId: data.tmbId,
collectionId: data.collectionId,
@@ -160,21 +164,21 @@ ${replaceVariable(Prompt_AgentQA.fixedText, { text })}`;
await MongoDatasetTraining.findByIdAndDelete(data._id);
// add bill
if (insertLen > 0) {
pushQAUsage({
teamId: data.teamId,
tmbId: data.tmbId,
inputTokens: await countGptMessagesTokens(messages),
outputTokens: await countPromptTokens(answer),
billId: data.billId,
model: modelData.model
});
} else {
addLog.info(`QA result 0:`, { answer });
}
pushQAUsage({
teamId: data.teamId,
tmbId: data.tmbId,
inputTokens: await countGptMessagesTokens(messages),
outputTokens: await countPromptTokens(answer),
billId: data.billId,
model: modelData.model
});
addLog.info(`[QA Queue] Finish`, {
time: Date.now() - startTime,
splitLength: qaArr.length,
usage: chatResponse.usage
});
reduceQueue();
generateQA();
return reduceQueueAndReturn();
} catch (err: any) {
addLog.error(`[QA Queue] Error`, err);
await MongoDatasetTraining.updateOne(
@@ -188,9 +192,7 @@ ${replaceVariable(Prompt_AgentQA.fixedText, { text })}`;
}
);
setTimeout(() => {
generateQA();
}, 1000);
return reduceQueueAndReturn(1000);
}
}
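
The refactor above folds the repeated `reduceQueue(); generateQA();` pairs into `reduceQueueAndReturn`, keeping the worker loop in one place: a global counter bounds concurrency, and every finished or failed task decrements it and re-enters the loop, optionally after a back-off delay. A condensed, self-contained sketch of that pattern (names mirror the diff, but the body is simplified and the fetch/process helpers are hypothetical):

// Hypothetical helpers standing in for the real queue operations.
declare function fetchNextTrainingItem(): Promise<object | null>;
declare function processItem(data: object): Promise<void>;

let qaQueueLen = 0;
const qaMaxProcess = 10;

const reduceQueueAndReturn = (delay = 0) => {
  qaQueueLen--;
  // Re-enter the loop so the worker keeps draining the queue, backing off after errors.
  if (delay) {
    setTimeout(generateQA, delay);
  } else {
    generateQA();
  }
};

async function generateQA(): Promise<void> {
  if (qaQueueLen >= qaMaxProcess) return; // respect the concurrency ceiling
  qaQueueLen++;

  const data = await fetchNextTrainingItem();
  if (!data) {
    qaQueueLen--; // queue drained; a later enqueue kicks the worker off again
    return;
  }

  try {
    await processItem(data);
    return reduceQueueAndReturn(); // success: pull the next item immediately
  } catch {
    return reduceQueueAndReturn(1000); // failure: retry the loop after 1s
  }
}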

View File

@@ -35,6 +35,8 @@ const reduceQueueAndReturn = (delay = 0) => {
/* 索引生成队列。每导入一次,就是一个单独的线程 */
export async function generateVector(): Promise<any> {
const max = global.systemEnv?.vectorMaxProcess || 10;
addLog.debug(`[Vector Queue] Queue size: ${global.vectorQueueLen}`);
if (global.vectorQueueLen >= max) return;
global.vectorQueueLen++;
const start = Date.now();

View File

@@ -0,0 +1,9 @@
import { GET, POST, PUT } from '@/web/common/api/request';
import type { PaginationProps, PaginationResponse } from '@fastgpt/web/common/fetch/type';
import type { OperationListItemType } from '@fastgpt/global/support/operationLog/type';
export const getOperationLogs = (props: PaginationProps<PaginationProps>) =>
POST<PaginationResponse<OperationListItemType>>(
`/proApi/support/user/team/operationLog/list`,
props
);