4.8.21 feature (#3742)

* model config

* feat: normalization embedding

* adapt unstream reasoning response

* remove select app

* perf: dataset search code

* fix: multiple audio video show

* perf: query extension output

* perf: link check

* perf: faq doc

* fix: ts

* feat: support reasoning text output

* feat: workflow support reasoning output
This commit is contained in:
Archer
2025-02-11 13:53:08 +08:00
committed by GitHub
parent 896a3f1472
commit 8738c32fb0
45 changed files with 640 additions and 570 deletions

View File

@@ -1,9 +1,10 @@
import React, { useEffect } from 'react';
import React, { useEffect, useRef } from 'react';
import { Box } from '@chakra-ui/react';
import { useMarkdownWidth } from '../hooks';
const AudioBlock = ({ code: audioUrl }: { code: string }) => {
const { width, Ref } = useMarkdownWidth();
const audioRef = useRef<HTMLAudioElement>(null);
useEffect(() => {
fetch(audioUrl?.trim(), {
@@ -13,8 +14,7 @@ const AudioBlock = ({ code: audioUrl }: { code: string }) => {
.then((response) => response.blob())
.then((blob) => {
const url = URL.createObjectURL(blob);
const audio = document.getElementById('player');
audio?.setAttribute('src', url);
audioRef?.current?.setAttribute('src', url);
})
.catch((err) => {
console.log(err);
@@ -22,8 +22,8 @@ const AudioBlock = ({ code: audioUrl }: { code: string }) => {
}, [audioUrl]);
return (
<Box w={width} ref={Ref}>
<audio id="player" controls style={{ width: '100%' }} />
<Box w={width} ref={Ref} my={4}>
<audio ref={audioRef} controls style={{ width: '100%' }} />
</Box>
);
};

View File

@@ -1,9 +1,10 @@
import React, { useEffect } from 'react';
import React, { useEffect, useRef } from 'react';
import { Box } from '@chakra-ui/react';
import { useMarkdownWidth } from '../hooks';
const VideoBlock = ({ code: videoUrl }: { code: string }) => {
const { width, Ref } = useMarkdownWidth();
const videoRef = useRef<HTMLVideoElement>(null);
useEffect(() => {
fetch(videoUrl?.trim(), {
@@ -13,8 +14,7 @@ const VideoBlock = ({ code: videoUrl }: { code: string }) => {
.then((response) => response.blob())
.then((blob) => {
const url = URL.createObjectURL(blob);
const video = document.getElementById('player');
video?.setAttribute('src', url);
videoRef?.current?.setAttribute('src', url);
})
.catch((err) => {
console.log(err);
@@ -22,8 +22,8 @@ const VideoBlock = ({ code: videoUrl }: { code: string }) => {
}, [videoUrl]);
return (
<Box w={width} ref={Ref}>
<video id="player" controls />
<Box w={width} ref={Ref} my={4} borderRadius={'md'} overflow={'hidden'}>
<video ref={videoRef} controls />
</Box>
);
};

View File

@@ -58,10 +58,10 @@ const MarkdownRender = ({ source = '', showAnimation, isDisabled, forbidZhFormat
// 保护 URL 格式https://, http://, /api/xxx
const urlPlaceholders: string[] = [];
const textWithProtectedUrls = source.replace(
/(https?:\/\/[^\s<]+[^<.,:;"')\]\s]|\/api\/[^\s]+)(?=\s|$)/g,
/https?:\/\/(?:(?:[\w-]+\.)+[a-zA-Z]{2,6}|localhost)(?::\d{2,5})?(?:\/[\w\-./?%&=@]*)?/g,
(match) => {
urlPlaceholders.push(match);
return `__URL_${urlPlaceholders.length - 1}__`;
return `__URL_${urlPlaceholders.length - 1}__ `;
}
);
@@ -73,14 +73,14 @@ const MarkdownRender = ({ source = '', showAnimation, isDisabled, forbidZhFormat
)
// 处理引用标记
.replace(/\n*(\[QUOTE SIGN\]\(.*\))/g, '$1')
// 处理 [quote:id] 格式引用,将 [quote:675934a198f46329dfc6d05a] 转换为 [675934a198f46329dfc6d05a]()
// 处理 [quote:id] 格式引用,将 [quote:675934a198f46329dfc6d05a] 转换为 [675934a198f46329dfc6d05a](QUOTE)
.replace(/\[quote:?\s*([a-f0-9]{24})\](?!\()/gi, '[$1](QUOTE)')
.replace(/\[([a-f0-9]{24})\](?!\()/g, '[$1](QUOTE)');
// 还原 URL
const finalText = textWithSpaces.replace(
/__URL_(\d+)__/g,
(_, index) => urlPlaceholders[parseInt(index)]
(_, index) => `${urlPlaceholders[parseInt(index)]}`
);
return finalText;

View File

@@ -99,6 +99,7 @@ const SettingLLMModel = ({
<AISettingModal
onClose={onCloseAIChatSetting}
onSuccess={(e) => {
console.log(e);
onChange(e);
onCloseAIChatSetting();
}}

View File

@@ -46,10 +46,11 @@ const TTSSelect = ({
</HStack>
),
value: model.model,
children: model.voices.map((voice) => ({
label: voice.label,
value: voice.value
}))
children:
model.voices?.map((voice) => ({
label: voice.label,
value: voice.value
})) || []
};
})
],

View File

@@ -226,7 +226,7 @@ const ChatBox = ({
status,
moduleName: name
};
} else if (event === SseResponseEventEnum.answer && reasoningText) {
} else if (reasoningText) {
if (lastValue.type === ChatItemValueTypeEnum.reasoning && lastValue.reasoning) {
lastValue.reasoning.content += reasoningText;
return {

View File

@@ -194,6 +194,7 @@ export const WholeResponseContent = ({
label={t('common:core.chat.response.module maxToken')}
value={activeModule?.maxToken}
/>
<Row label={t('chat:reasoning_text')} value={activeModule?.reasoningText} />
<Row
label={t('common:core.chat.response.module historyPreview')}
rawDom={
@@ -238,6 +239,22 @@ export const WholeResponseContent = ({
label={t('common:core.chat.response.search using reRank')}
value={`${activeModule?.searchUsingReRank}`}
/>
{activeModule.queryExtensionResult && (
<>
<Row
label={t('common:core.chat.response.Extension model')}
value={activeModule.queryExtensionResult.model}
/>
<Row
label={t('chat:query_extension_IO_tokens')}
value={`${activeModule.queryExtensionResult.inputTokens}/${activeModule.queryExtensionResult.outputTokens}`}
/>
<Row
label={t('common:support.wallet.usage.Extension result')}
value={activeModule.queryExtensionResult.query}
/>
</>
)}
<Row
label={t('common:core.chat.response.Extension model')}
value={activeModule?.extensionModel}

View File

@@ -1,5 +1,5 @@
import { getCaptchaPic } from '@/web/support/user/api';
import { Button, Input, Image, ModalBody, ModalFooter, Skeleton } from '@chakra-ui/react';
import { Button, Input, ModalBody, ModalFooter, Skeleton } from '@chakra-ui/react';
import MyImage from '@fastgpt/web/components/common/Image/MyImage';
import MyModal from '@fastgpt/web/components/common/MyModal';
import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
@@ -16,7 +16,7 @@ const SendCodeAuthModal = ({
onClose: () => void;
onSending: boolean;
onSendCode: (params_0: { username: string; captcha: string }) => Promise<void>;
onSendCode: (e: { username: string; captcha: string }) => Promise<void>;
}) => {
const { t } = useTranslation();
@@ -63,11 +63,16 @@ const SendCodeAuthModal = ({
</Button>
<Button
isLoading={onSending}
onClick={handleSubmit(({ code }) => {
return onSendCode({ username, captcha: code }).then(() => {
onClose();
});
})}
onClick={handleSubmit(
({ code }) => {
return onSendCode({ username, captcha: code }).then(() => {
onClose();
});
},
(err) => {
console.log(err);
}
)}
>
{t('common:common.Confirm')}
</Button>

View File

@@ -64,9 +64,15 @@ export type SearchTestProps = {
[NodeInputKeyEnum.datasetMaxTokens]?: number;
[NodeInputKeyEnum.datasetSearchMode]?: `${DatasetSearchModeEnum}`;
[NodeInputKeyEnum.datasetSearchUsingReRank]?: boolean;
[NodeInputKeyEnum.datasetSearchUsingExtensionQuery]?: boolean;
[NodeInputKeyEnum.datasetSearchExtensionModel]?: string;
[NodeInputKeyEnum.datasetSearchExtensionBg]?: string;
[NodeInputKeyEnum.datasetDeepSearch]?: boolean;
[NodeInputKeyEnum.datasetDeepSearchModel]?: string;
[NodeInputKeyEnum.datasetDeepSearchMaxTimes]?: number;
[NodeInputKeyEnum.datasetDeepSearchBg]?: string;
};
export type SearchTestResponse = {
list: SearchDataResponseItemType[];

View File

@@ -23,7 +23,6 @@ import PromptEditor from '@fastgpt/web/components/common/Textarea/PromptEditor';
import { formatEditorVariablePickerIcon } from '@fastgpt/global/core/workflow/utils';
import SearchParamsTip from '@/components/core/dataset/SearchParamsTip';
import SettingLLMModel from '@/components/core/ai/SettingLLMModel';
import type { SettingAIDataType } from '@fastgpt/global/core/app/type.d';
import { TTSTypeEnum } from '@/web/core/app/constants';
import { workflowSystemVariables } from '@/web/core/app/utils';
import { useContextSelector } from 'use-context-selector';
@@ -164,12 +163,13 @@ const EditForm = ({
aiChatResponseFormat: appForm.aiSettings.aiChatResponseFormat,
aiChatJsonSchema: appForm.aiSettings.aiChatJsonSchema
}}
onChange={({ maxHistories = 6, aiChatReasoning = true, ...data }) => {
onChange={({ maxHistories = 6, ...data }) => {
setAppForm((state) => ({
...state,
aiSettings: {
...state.aiSettings,
...data
...data,
maxHistories
}
}));
}}

View File

@@ -106,10 +106,12 @@ const InputTypeConfig = ({
...listValue[index]
}));
const valueTypeSelectList = Object.values(FlowValueTypeMap).map((item) => ({
label: t(item.label as any),
value: item.value
}));
const valueTypeSelectList = Object.values(FlowValueTypeMap)
.filter((item) => !item.abandon)
.map((item) => ({
label: t(item.label as any),
value: item.value
}));
const showValueTypeSelect =
inputType === FlowNodeInputTypeEnum.reference ||

View File

@@ -66,9 +66,6 @@ const NodePluginConfig = ({ data, selected }: NodeProps<FlowNodeItemType>) => {
>
<Container w={'360px'}>
<Instruction {...componentsProps} />
<Box pt={4}>
<FileSelectConfig {...componentsProps} />
</Box>
</Container>
</NodeCard>
);

View File

@@ -93,7 +93,9 @@ export const useReference = ({
),
value: node.nodeId,
children: filterWorkflowNodeOutputsByType(node.outputs, valueType)
.filter((output) => output.id !== NodeOutputKeyEnum.addOutputParam)
.filter(
(output) => output.id !== NodeOutputKeyEnum.addOutputParam && output.invalid !== true
)
.map((output) => {
return {
label: t(output.label as any),

View File

@@ -13,7 +13,7 @@ const SelectAiModelRender = ({ item, inputs = [], nodeId }: RenderInputProps) =>
(e: SettingAIDataType) => {
for (const key in e) {
const input = inputs.find((input) => input.key === key);
input &&
if (input) {
onChangeNode({
nodeId,
type: 'updateInput',
@@ -24,6 +24,7 @@ const SelectAiModelRender = ({ item, inputs = [], nodeId }: RenderInputProps) =>
value: e[key]
}
});
}
}
},
[inputs, nodeId, onChangeNode]

View File

@@ -1,4 +1,4 @@
import React, { useMemo, useState } from 'react';
import React, { useEffect, useMemo, useState } from 'react';
import type { FlowNodeOutputItemType } from '@fastgpt/global/core/workflow/type/io.d';
import { Box, Button, Flex } from '@chakra-ui/react';
import { FlowNodeOutputTypeEnum } from '@fastgpt/global/core/workflow/node/constant';
@@ -14,6 +14,7 @@ import QuestionTip from '@fastgpt/web/components/common/MyTooltip/QuestionTip';
import FormLabel from '@fastgpt/web/components/common/MyBox/FormLabel';
import dynamic from 'next/dynamic';
import { defaultOutput } from './FieldEditModal';
import { useSystemStore } from '@/web/common/system/useSystemStore';
const FieldEditModal = dynamic(() => import('./FieldEditModal'));
@@ -25,6 +26,7 @@ const RenderOutput = ({
flowOutputList: FlowNodeOutputItemType[];
}) => {
const { t } = useTranslation();
const { llmModelList } = useSystemStore();
const onChangeNode = useContextSelector(WorkflowContext, (v) => v.onChangeNode);
const outputString = useMemo(() => JSON.stringify(flowOutputList), [flowOutputList]);
@@ -32,6 +34,32 @@ const RenderOutput = ({
return JSON.parse(outputString) as FlowNodeOutputItemType[];
}, [outputString]);
// Condition check
const inputs = useContextSelector(WorkflowContext, (v) => {
const node = v.nodeList.find((node) => node.nodeId === nodeId);
return JSON.stringify(node?.inputs);
});
useEffect(() => {
flowOutputList.forEach((output) => {
if (!output.invalidCondition || !inputs) return;
const parsedInputs = JSON.parse(inputs);
const invalid = output.invalidCondition({
inputs: parsedInputs,
llmModelList
});
onChangeNode({
nodeId,
type: 'replaceOutput',
key: output.key,
value: {
...output,
invalid
}
});
});
}, [copyOutputs, nodeId, inputs, llmModelList]);
const [editField, setEditField] = useState<FlowNodeOutputItemType>();
const RenderDynamicOutputs = useMemo(() => {
@@ -129,12 +157,14 @@ const RenderOutput = ({
return (
<>
{renderOutputs.map((output, i) => {
return output.label ? (
return output.label && output.invalid !== true ? (
<FormLabel
key={output.key}
required={output.required}
mb={i === renderOutputs.length - 1 ? 0 : 4}
position={'relative'}
_notLast={{
mb: 4
}}
>
<OutputLabel nodeId={nodeId} output={output} />
</FormLabel>

View File

@@ -125,7 +125,12 @@ export const getEditorVariables = ({
: sourceNodes
.map((node) => {
return node.outputs
.filter((output) => !!output.label && output.id !== NodeOutputKeyEnum.addOutputParam)
.filter(
(output) =>
!!output.label &&
output.invalid !== true &&
output.id !== NodeOutputKeyEnum.addOutputParam
)
.map((output) => {
return {
label: t((output.label as any) || ''),

View File

@@ -28,12 +28,15 @@ function Error() {
return (
<Box whiteSpace={'pre-wrap'}>
{`出现未捕获的异常。
1. 私有部署用户90%由于配置文件不正确/模型未启用导致。请确保系统内每个系列模型至少有一个可用
1. 私有部署用户90%由于模型配置不正确/模型未启用导致。
2. 部分系统不兼容相关API。大部分是苹果的safari 浏览器导致,可以尝试更换 chrome。
3. 请关闭浏览器翻译功能,部分翻译导致页面崩溃。
排除3后打开控制台的 console 查看具体报错信息。
如果提示 xxx undefined 的话,就是配置文件有错误,或者是缺少可用模型。
如果提示 xxx undefined 的话,就是模型配置不正确,检查:
1. 请确保系统内每个系列模型至少有一个可用,可以在【账号-模型提供商】中检查。
2. 请确保至少有一个知识库文件处理模型(语言模型中有一个开关),否则知识库创建会报错。
2. 检查模型中一些“对象”参数是否异常(数组和对象),如果为空,可以尝试给个空数组或空对象。
`}
</Box>
);

View File

@@ -1,12 +1,12 @@
import type { NextApiRequest } from 'next';
import type { SearchTestProps } from '@/global/core/dataset/api.d';
import type { SearchTestProps, SearchTestResponse } from '@/global/core/dataset/api.d';
import { authDataset } from '@fastgpt/service/support/permission/dataset/auth';
import { pushGenerateVectorUsage } from '@/service/support/wallet/usage/push';
import { searchDatasetData } from '@fastgpt/service/core/dataset/search/controller';
import {
deepRagSearch,
defaultSearchDatasetData
} from '@fastgpt/service/core/dataset/search/controller';
import { updateApiKeyUsage } from '@fastgpt/service/support/openapi/tools';
import { UsageSourceEnum } from '@fastgpt/global/support/wallet/usage/constants';
import { getLLMModel } from '@fastgpt/service/core/ai/model';
import { datasetSearchQueryExtension } from '@fastgpt/service/core/dataset/search/utils';
import {
checkTeamAIPoints,
checkTeamReRankPermission
@@ -15,9 +15,9 @@ import { NextAPI } from '@/service/middleware/entry';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
import { useIPFrequencyLimit } from '@fastgpt/service/common/middle/reqFrequencyLimit';
import { agentSearchDatasetData } from '@fastgpt/service/core/dataset/search/agent';
import { ApiRequestProps } from '@fastgpt/service/type/next';
async function handler(req: NextApiRequest) {
async function handler(req: ApiRequestProps<SearchTestProps>): Promise<SearchTestResponse> {
const {
datasetId,
text,
@@ -26,10 +26,15 @@ async function handler(req: NextApiRequest) {
searchMode,
usingReRank,
datasetSearchUsingExtensionQuery = true,
datasetSearchUsingExtensionQuery = false,
datasetSearchExtensionModel,
datasetSearchExtensionBg = ''
} = req.body as SearchTestProps;
datasetSearchExtensionBg,
datasetDeepSearch = false,
datasetDeepSearchModel,
datasetDeepSearchMaxTimes,
datasetDeepSearchBg
} = req.body;
if (!datasetId || !text) {
return Promise.reject(CommonErrEnum.missingParams);
@@ -48,29 +53,31 @@ async function handler(req: NextApiRequest) {
// auth balance
await checkTeamAIPoints(teamId);
// query extension
const extensionModel =
datasetSearchUsingExtensionQuery && datasetSearchExtensionModel
? getLLMModel(datasetSearchExtensionModel)
: undefined;
const { concatQueries, rewriteQuery, aiExtensionResult } = await datasetSearchQueryExtension({
query: text,
extensionModel,
extensionBg: datasetSearchExtensionBg
});
const { searchRes, tokens, ...result } = await searchDatasetData({
const searchData = {
histories: [],
teamId,
reRankQuery: rewriteQuery,
queries: concatQueries,
reRankQuery: text,
queries: [text],
model: dataset.vectorModel,
limit: Math.min(limit, 20000),
similarity,
datasetIds: [datasetId],
searchMode,
usingReRank: usingReRank && (await checkTeamReRankPermission(teamId))
});
};
const { searchRes, tokens, queryExtensionResult, deepSearchResult, ...result } = datasetDeepSearch
? await deepRagSearch({
...searchData,
datasetDeepSearchModel,
datasetDeepSearchMaxTimes,
datasetDeepSearchBg
})
: await defaultSearchDatasetData({
...searchData,
datasetSearchUsingExtensionQuery,
datasetSearchExtensionModel,
datasetSearchExtensionBg
});
// push bill
const { totalPoints } = pushGenerateVectorUsage({
@@ -80,12 +87,16 @@ async function handler(req: NextApiRequest) {
model: dataset.vectorModel,
source: apikey ? UsageSourceEnum.api : UsageSourceEnum.fastgpt,
...(aiExtensionResult &&
extensionModel && {
extensionModel: extensionModel.name,
extensionInputTokens: aiExtensionResult.inputTokens,
extensionOutputTokens: aiExtensionResult.outputTokens
})
...(queryExtensionResult && {
extensionModel: queryExtensionResult.model,
extensionInputTokens: queryExtensionResult.inputTokens,
extensionOutputTokens: queryExtensionResult.outputTokens
}),
...(deepSearchResult && {
deepSearchModel: deepSearchResult.model,
deepSearchInputTokens: deepSearchResult.inputTokens,
deepSearchOutputTokens: deepSearchResult.outputTokens
})
});
if (apikey) {
updateApiKeyUsage({
@@ -97,7 +108,7 @@ async function handler(req: NextApiRequest) {
return {
list: searchRes,
duration: `${((Date.now() - start) / 1000).toFixed(3)}s`,
queryExtensionModel: aiExtensionResult?.model,
queryExtensionModel: queryExtensionResult?.model,
...result
};
}

View File

@@ -81,7 +81,7 @@ const Login = ({ ChineseRedirectUrl }: { ChineseRedirectUrl: string }) => {
router.push(navigateTo);
}, 300);
},
[lastRoute, router, setUserInfo]
[lastRoute, router, setUserInfo, llmModelList]
);
const DynamicComponent = useMemo(() => {

View File

@@ -95,7 +95,10 @@ export const pushGenerateVectorUsage = ({
source = UsageSourceEnum.fastgpt,
extensionModel,
extensionInputTokens,
extensionOutputTokens
extensionOutputTokens,
deepSearchModel,
deepSearchInputTokens,
deepSearchOutputTokens
}: {
billId?: string;
teamId: string;
@@ -107,6 +110,10 @@ export const pushGenerateVectorUsage = ({
extensionModel?: string;
extensionInputTokens?: number;
extensionOutputTokens?: number;
deepSearchModel?: string;
deepSearchInputTokens?: number;
deepSearchOutputTokens?: number;
}) => {
const { totalPoints: totalVector, modelName: vectorModelName } = formatModelChars2Points({
modelType: ModelTypeEnum.embedding,
@@ -131,8 +138,25 @@ export const pushGenerateVectorUsage = ({
extensionModelName: modelName
};
})();
const { deepSearchTotalPoints, deepSearchModelName } = (() => {
if (!deepSearchModel || !deepSearchInputTokens)
return {
deepSearchTotalPoints: 0,
deepSearchModelName: ''
};
const { totalPoints, modelName } = formatModelChars2Points({
modelType: ModelTypeEnum.llm,
model: deepSearchModel,
inputTokens: deepSearchInputTokens,
outputTokens: deepSearchOutputTokens
});
return {
deepSearchTotalPoints: totalPoints,
deepSearchModelName: modelName
};
})();
const totalPoints = totalVector + extensionTotalPoints;
const totalPoints = totalVector + extensionTotalPoints + deepSearchTotalPoints;
// 插入 Bill 记录
if (billId) {
@@ -148,12 +172,12 @@ export const pushGenerateVectorUsage = ({
createUsage({
teamId,
tmbId,
appName: 'support.wallet.moduleName.index',
appName: i18nT('common:support.wallet.moduleName.index'),
totalPoints,
source,
list: [
{
moduleName: 'support.wallet.moduleName.index',
moduleName: i18nT('common:support.wallet.moduleName.index'),
amount: totalVector,
model: vectorModelName,
inputTokens
@@ -161,13 +185,24 @@ export const pushGenerateVectorUsage = ({
...(extensionModel !== undefined
? [
{
moduleName: 'core.module.template.Query extension',
moduleName: i18nT('common:core.module.template.Query extension'),
amount: extensionTotalPoints,
model: extensionModelName,
inputTokens: extensionInputTokens,
outputTokens: extensionOutputTokens
}
]
: []),
...(deepSearchModel !== undefined
? [
{
moduleName: i18nT('common:deep_rag_search'),
amount: deepSearchTotalPoints,
model: deepSearchModelName,
inputTokens: deepSearchInputTokens,
outputTokens: deepSearchOutputTokens
}
]
: [])
]
});

View File

@@ -179,6 +179,12 @@ export const streamFetch = ({
})();
// console.log(parseJson, event);
if (event === SseResponseEventEnum.answer) {
const reasoningText = parseJson.choices?.[0]?.delta?.reasoning_content || '';
onMessage({
event,
reasoningText
});
const text = parseJson.choices?.[0]?.delta?.content || '';
for (const item of text) {
pushDataToQueue({
@@ -186,13 +192,13 @@ export const streamFetch = ({
text: item
});
}
} else if (event === SseResponseEventEnum.fastAnswer) {
const reasoningText = parseJson.choices?.[0]?.delta?.reasoning_content || '';
onMessage({
event,
reasoningText
});
} else if (event === SseResponseEventEnum.fastAnswer) {
const text = parseJson.choices?.[0]?.delta?.content || '';
pushDataToQueue({
event,