Mirror of https://github.com/labring/FastGPT.git (synced 2025-07-21 11:43:56 +00:00)

Feat: Quote auth (#4715)

* fix outlink quote number auth (#4705)
* perf: quote auth
* feat: qwen3 config

Co-authored-by: heheer <heheer@sealos.io>

@@ -10,7 +10,7 @@ weight: 792

## 🚀 New Features

1. Support Streamable HTTP MCP mode: FastGPT can be called over MCP without installing a separate mcp_server.
1. Qwen3 model presets.

## ⚙️ Optimizations

@@ -2,14 +2,14 @@
"provider": "Qwen",
"list": [
{
"model": "qwen-turbo",
"name": "Qwen-turbo",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
"model": "qwen-vl-plus",
"name": "qwen-vl-plus",
"maxContext": 32000,
"maxResponse": 2000,
"quoteMaxToken": 20000,
"maxTemperature": 1.2,
"vision": true,
"toolChoice": false,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
@@ -19,12 +19,9 @@
"usedInQueryExtension": true,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
"showStopSign": true
},
{
"model": "qwen-plus",
@@ -52,14 +49,14 @@
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwen-vl-plus",
"name": "qwen-vl-plus",
"maxContext": 32000,
"maxResponse": 2000,
"quoteMaxToken": 20000,
"maxTemperature": 1.2,
"vision": true,
"toolChoice": false,
"model": "qwen-turbo",
"name": "Qwen-turbo",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"vision": false,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
@@ -69,10 +66,14 @@
"usedInQueryExtension": true,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},

{
"model": "qwen-max",
"name": "Qwen-max",
@@ -122,6 +123,214 @@
"showTopP": true,
"showStopSign": true
},
{
"model": "qwen3-235b-a22b",
"name": "qwen3-235b-a22b",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"vision": false,
"reasoning": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
"customCQPrompt": "",
"usedInExtractFields": true,
"usedInQueryExtension": true,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwen3-32b",
"name": "qwen3-32b",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"vision": false,
"reasoning": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
"customCQPrompt": "",
"usedInExtractFields": true,
"usedInQueryExtension": true,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwen3-30b-a3b",
"name": "qwen3-30b-a3b",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"vision": false,
"reasoning": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
"customCQPrompt": "",
"usedInExtractFields": true,
"usedInQueryExtension": true,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwen3-14b",
"name": "qwen3-14b",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"vision": false,
"reasoning": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
"customCQPrompt": "",
"usedInExtractFields": true,
"usedInQueryExtension": true,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwen3-8b",
"name": "qwen3-8b",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"vision": false,
"reasoning": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
"customCQPrompt": "",
"usedInExtractFields": true,
"usedInQueryExtension": true,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwen3-4b",
"name": "qwen3-4b",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"vision": false,
"reasoning": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
"customCQPrompt": "",
"usedInExtractFields": true,
"usedInQueryExtension": true,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwen3-1.7b",
"name": "qwen3-1.7b",
"maxContext": 32000,
"maxResponse": 8000,
"quoteMaxToken": 30000,
"maxTemperature": 1,
"vision": false,
"reasoning": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
"customCQPrompt": "",
"usedInExtractFields": true,
"usedInQueryExtension": true,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwen3-0.6b",
"name": "qwen3-0.6b",
"maxContext": 32000,
"maxResponse": 8000,
"quoteMaxToken": 30000,
"maxTemperature": 1,
"vision": false,
"reasoning": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
"customCQPrompt": "",
"usedInExtractFields": true,
"usedInQueryExtension": true,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwq-plus",
"name": "qwq-plus",

@@ -110,6 +110,12 @@ export const loadSystemModels = async (init = false) => {
provider: ModelProviderIdType;
list: SystemModelItemType[];
};
const mergeObject = (obj1: any, obj2: any) => {
if (!obj1 && !obj2) return undefined;
const formatObj1 = typeof obj1 === 'object' ? obj1 : {};
const formatObj2 = typeof obj2 === 'object' ? obj2 : {};
return { ...formatObj1, ...formatObj2 };
};

fileContent.list.forEach((fileModel) => {
const dbModel = dbModels.find((item) => item.model === fileModel.model);
@@ -117,6 +123,10 @@ export const loadSystemModels = async (init = false) => {
const modelData: any = {
...fileModel,
...dbModel?.metadata,
// @ts-ignore
defaultConfig: mergeObject(fileModel.defaultConfig, dbModel?.metadata?.defaultConfig),
// @ts-ignore
fieldMap: mergeObject(fileModel.fieldMap, dbModel?.metadata?.fieldMap),
provider: getModelProvider(dbModel?.metadata?.provider || fileContent.provider).id,
type: dbModel?.metadata?.type || fileModel.type,
isCustom: false
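
The new mergeObject helper above combines the file preset with the DB override per key: the DB metadata wins on conflicts because it is spread last, keys present on only one side are kept, and the result stays undefined when neither side is set. A minimal sketch of that behavior (the example values are hypothetical):

```ts
const mergeObject = (obj1: any, obj2: any) => {
  if (!obj1 && !obj2) return undefined;
  const formatObj1 = typeof obj1 === 'object' ? obj1 : {};
  const formatObj2 = typeof obj2 === 'object' ? obj2 : {};
  return { ...formatObj1, ...formatObj2 };
};

// File preset keeps keys the admin never touched; the DB override wins on conflicts.
mergeObject({ temperature: 0.7, stream: true }, { temperature: 0.3 });
// => { temperature: 0.3, stream: true }

// When neither side is set, the field stays undefined instead of becoming "{}".
mergeObject(undefined, undefined); // => undefined
```
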
@@ -36,7 +36,7 @@
"Warning": "Warning",
"add_new": "Add New",
"add_new_param": "Add new param",
"all_quotes": "Quote all",
"all_quotes": "All quotes",
"app.templateMarket.templateTags.Image_generation": "Image generation",
"app.templateMarket.templateTags.Office_services": "Office Services",
"app.templateMarket.templateTags.Roleplay": "role play",

@@ -22,7 +22,7 @@ import { getCollectionSourceData } from '@fastgpt/global/core/dataset/collection
import Markdown from '.';
import { getSourceNameIcon } from '@fastgpt/global/core/dataset/utils';

const A = ({ children, ...props }: any) => {
const A = ({ children, chatAuthData, ...props }: any) => {
const { t } = useTranslation();

const { isOpen, onOpen, onClose } = useDisclosure();
@@ -52,7 +52,7 @@ const A = ({ children, ...props }: any) => {
data: quoteData,
loading,
runAsync: getQuoteDataById
} = useRequest2(getQuoteData, {
} = useRequest2((id: string) => getQuoteData({ id, ...chatAuthData }), {
manual: true
});
const sourceData = useMemo(
@@ -149,4 +149,4 @@ const A = ({ children, ...props }: any) => {
return <Link {...props}>{children}</Link>;
};

export default A;
export default React.memo(A);

@@ -12,6 +12,8 @@ import dynamic from 'next/dynamic';

import { Box } from '@chakra-ui/react';
import { CodeClassNameEnum, mdTextFormat } from './utils';
import { useCreation } from 'ahooks';
import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';

const CodeLight = dynamic(() => import('./codeBlock/CodeLight'), { ssr: false });
const MermaidCodeBlock = dynamic(() => import('./img/MermaidCodeBlock'), { ssr: false });
@@ -31,6 +33,11 @@ type Props = {
showAnimation?: boolean;
isDisabled?: boolean;
forbidZhFormat?: boolean;
chatAuthData?: {
appId: string;
chatId: string;
chatItemDataId: string;
} & OutLinkChatAuthProps;
};
const Markdown = (props: Props) => {
const source = props.source || '';
@@ -41,16 +48,21 @@ const Markdown = (props: Props) => {

return <Box whiteSpace={'pre-wrap'}>{source}</Box>;
};
const MarkdownRender = ({ source = '', showAnimation, isDisabled, forbidZhFormat }: Props) => {
const components = useMemo<any>(
() => ({
const MarkdownRender = ({
source = '',
showAnimation,
isDisabled,
forbidZhFormat,
chatAuthData
}: Props) => {
const components = useCreation(() => {
return {
img: Image,
pre: RewritePre,
code: Code,
a: A
}),
[]
);
a: (props: any) => <A {...props} chatAuthData={chatAuthData} />
};
}, [chatAuthData]);

const formatSource = useMemo(() => {
if (showAnimation || forbidZhFormat) return source;

@@ -33,8 +33,7 @@ export const mdTextFormat = (text: string) => {
// /([\u4e00-\u9fa5\u3000-\u303f])([a-zA-Z0-9])|([a-zA-Z0-9])([\u4e00-\u9fa5\u3000-\u303f])/g,
// '$1$3 $2$4'
// )
// Handle [quote:id] references: convert [quote:675934a198f46329dfc6d05a] into [675934a198f46329dfc6d05a](QUOTE)
.replace(/\[quote:?\s*([a-f0-9]{24})\](?!\()/gi, '[$1](QUOTE)')
// Handle bare [id] references: convert [675934a198f46329dfc6d05a] into [675934a198f46329dfc6d05a](QUOTE)
.replace(/\[([a-f0-9]{24})\](?!\()/g, '[$1](QUOTE)');

// Add a space after a link when it is followed by Chinese punctuation
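
To make the two replacements above concrete, here is what they do to a sample string (the 24-character hex id comes from the comment; the surrounding text is made up). The resulting QUOTE links are what the A component in the earlier hunk resolves through getQuoteData.

```ts
const input = 'See [quote:675934a198f46329dfc6d05a] and [675934a198f46329dfc6d05a].';

const output = input
  // [quote:id] form -> markdown link whose href is the literal "QUOTE"
  .replace(/\[quote:?\s*([a-f0-9]{24})\](?!\()/gi, '[$1](QUOTE)')
  // bare [id] form -> same link; the (?!\() lookahead skips ids that already have a link target
  .replace(/\[([a-f0-9]{24})\](?!\()/g, '[$1](QUOTE)');

// output === 'See [675934a198f46329dfc6d05a](QUOTE) and [675934a198f46329dfc6d05a](QUOTE).'
```
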
@@ -22,6 +22,7 @@ import { VariableInputEnum } from '@fastgpt/global/core/workflow/constants';
import { getChatResData } from '@/web/core/chat/api';
import { ChatItemContext } from '@/web/core/chat/context/chatItemContext';
import { ChatRecordContext } from '@/web/core/chat/context/chatRecordContext';
import { useCreation } from 'ahooks';

export type ChatProviderProps = {
appId: string;
@@ -128,13 +129,17 @@ export const ChatBoxContext = createContext<useChatStoreType>({
const Provider = ({
appId,
chatId,
outLinkAuthData = {},
outLinkAuthData,
chatType = 'chat',
children,
...props
}: ChatProviderProps & {
children: React.ReactNode;
}) => {
const formatOutLinkAuth = useCreation(() => {
return outLinkAuthData || {};
}, [outLinkAuthData]);

const welcomeText = useContextSelector(
ChatItemContext,
(v) => v.chatBoxData?.app?.chatConfig?.welcomeText ?? ''
@@ -187,7 +192,7 @@ const Provider = ({
} = useAudioPlay({
appId,
ttsConfig,
...outLinkAuthData
...formatOutLinkAuth
});

const autoTTSResponse =
@@ -209,7 +214,7 @@ const Provider = ({
appId: appId,
chatId: chatId,
dataId,
...outLinkAuthData
...formatOutLinkAuth
});
setChatRecords((state) =>
state.map((item) => (item.dataId === dataId ? { ...item, responseData: resData } : item))
@@ -217,7 +222,7 @@
return resData;
}
},
[chatRecords, chatId, appId, outLinkAuthData, setChatRecords]
[chatRecords, chatId, appId, formatOutLinkAuth, setChatRecords]
);
const value: useChatStoreType = {
...props,
@@ -243,7 +248,7 @@ const Provider = ({
chatInputGuide,
appId,
chatId,
outLinkAuthData,
outLinkAuthData: formatOutLinkAuth,
getHistoryResponseData,
chatType
};
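
A note on the outLinkAuthData change above: with a default parameter of `{}`, every render in which the prop is undefined produces a brand-new object, so hooks and callbacks that list it in their dependencies re-run each time. Wrapping it in ahooks' useCreation keeps the fallback reference stable. A minimal sketch of the pattern, not the exact FastGPT code:

```ts
import { useCreation } from 'ahooks';
import type { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';

// Stable fallback: the empty object is only recreated when outLinkAuthData itself
// changes, instead of on every render as `outLinkAuthData = {}` in the parameter
// list would do.
const useStableOutLinkAuth = (outLinkAuthData?: OutLinkChatAuthProps) =>
  useCreation(() => outLinkAuthData || {}, [outLinkAuthData]);
```
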
@@ -100,6 +100,7 @@ const AIContentCard = React.memo(function AIContentCard({

return (
<AIResponseBox
chatItemDataId={dataId}
key={key}
value={value}
isLastResponseValue={isLastChild && i === chatValue.length - 1}

@@ -35,6 +35,7 @@ const RenderOutput = () => {
const key = `${histories[1].dataId}-ai-${i}`;
return (
<AIResponseBox
chatItemDataId={histories[1].dataId}
key={key}
value={value}
isLastResponseValue={true}

@@ -31,6 +31,8 @@ import { SelectOptionsComponent, FormInputComponent } from './Interactive/Intera
import { extractDeepestInteractive } from '@fastgpt/global/core/workflow/runtime/utils';
import { useContextSelector } from 'use-context-selector';
import { ChatItemContext } from '@/web/core/chat/context/chatItemContext';
import { ChatBoxContext } from '../ChatContainer/ChatBox/Provider';
import { useCreation } from 'ahooks';

const accordionButtonStyle = {
w: 'auto',
@@ -87,24 +89,33 @@ const RenderResoningContent = React.memo(function RenderResoningContent({
});
const RenderText = React.memo(function RenderText({
showAnimation,
text
text,
chatItemDataId
}: {
showAnimation: boolean;
text: string;
chatItemDataId: string;
}) {
const isResponseDetail = useContextSelector(ChatItemContext, (v) => v.isResponseDetail);

const appId = useContextSelector(ChatBoxContext, (v) => v.appId);
const chatId = useContextSelector(ChatBoxContext, (v) => v.chatId);
const outLinkAuthData = useContextSelector(ChatBoxContext, (v) => v.outLinkAuthData);

const source = useMemo(() => {
if (!text) return '';

// Remove quote references if not showing response detail
return isResponseDetail ? text : text.replace(/\[[a-f0-9]{24}\]\(QUOTE\)/g, '');
return isResponseDetail
? text
: text.replace(/\[([a-f0-9]{24})\]\(QUOTE\)/g, '').replace(/\[([a-f0-9]{24})\](?!\()/g, '');
}, [text, isResponseDetail]);

// First empty line
// if (!source && !isLastChild) return null;
const chatAuthData = useCreation(() => {
return { appId, chatId, chatItemDataId, ...outLinkAuthData };
}, [appId, chatId, chatItemDataId, outLinkAuthData]);

return <Markdown source={source} showAnimation={showAnimation} />;
return <Markdown source={source} showAnimation={showAnimation} chatAuthData={chatAuthData} />;
});

const RenderTool = React.memo(
@@ -230,17 +241,23 @@ const RenderUserFormInteractive = React.memo(function RenderFormInput({
});

const AIResponseBox = ({
chatItemDataId,
value,
isLastResponseValue,
isChatting
}: {
chatItemDataId: string;
value: UserChatItemValueItemType | AIChatItemValueItemType;
isLastResponseValue: boolean;
isChatting: boolean;
}) => {
if (value.type === ChatItemValueTypeEnum.text && value.text) {
return (
<RenderText showAnimation={isChatting && isLastResponseValue} text={value.text.content} />
<RenderText
chatItemDataId={chatItemDataId}
showAnimation={isChatting && isLastResponseValue}
text={value.text.content}
/>
);
}
if (value.type === ChatItemValueTypeEnum.reasoning && value.reasoning) {
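
The quote-stripping change in RenderText above now removes both citation forms when responseDetail is disabled, not just the linked form. A small illustration (the answer text is made up):

```ts
const text = 'The answer. [675934a198f46329dfc6d05a](QUOTE) [675934a198f46329dfc6d05a]';

// When isResponseDetail is false, both the linked and the bare id references are removed:
const hidden = text
  .replace(/\[([a-f0-9]{24})\]\(QUOTE\)/g, '')
  .replace(/\[([a-f0-9]{24})\](?!\()/g, '');
// hidden === 'The answer.  ' (only whitespace remains where the ids were)
```
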
@@ -143,6 +143,7 @@ export const ModelEditModal = ({
data[key] = '';
}
}

return putSystemModel({
model: data.model,
metadata: data
@@ -490,7 +491,7 @@ export const ModelEditModal = ({
value={JSON.stringify(getValues('defaultConfig'), null, 2)}
onChange={(e) => {
if (!e) {
setValue('defaultConfig', {});
setValue('defaultConfig', undefined);
return;
}
try {
@@ -582,12 +583,6 @@ export const ModelEditModal = ({
</Td>
</Tr>
<Tr>
<Td>
<HStack spacing={1}>
<Box>{t('account:model.function_call')}</Box>
<QuestionTip label={t('account:model.function_call_tip')} />
</HStack>
</Td>
<Td textAlign={'right'}>
<Flex justifyContent={'flex-end'}>
<Switch {...register('functionCall')} />
@@ -720,12 +715,13 @@ export const ModelEditModal = ({
value={JSON.stringify(getValues('defaultConfig'), null, 2)}
resize
onChange={(e) => {
console.log(e, '===');
if (!e) {
setValue('defaultConfig', {});
setValue('defaultConfig', undefined);
return;
}
try {
setValue('defaultConfig', JSON.parse(e));
setValue('defaultConfig', JSON.parse(e.trim()));
} catch (error) {
console.error(error);
}

@@ -36,7 +36,7 @@ async function handler(req: ApiRequestProps<GetQuoteDataProps>): Promise<GetQuot
datasetDataIdList
} = req.body;

const [chat, { chatItem }] = await Promise.all([
const [{ chat, responseDetail }, { chatItem }] = await Promise.all([
authChatCrud({
req,
authToken: true,
@@ -49,7 +49,7 @@ async function handler(req: ApiRequestProps<GetQuoteDataProps>): Promise<GetQuot
}),
authCollectionInChat({ appId, chatId, chatItemDataId, collectionIds: collectionIdList })
]);
if (!chat) return Promise.reject(ChatErrEnum.unAuthChat);
if (!chat || !responseDetail) return Promise.reject(ChatErrEnum.unAuthChat);

const list = await MongoDatasetData.find(
{ _id: { $in: datasetDataIdList }, collectionId: { $in: collectionIdList } },

@@ -1,33 +1,105 @@
import type { NextApiRequest } from 'next';
import { NextAPI } from '@/service/middleware/entry';
import { DatasetCollectionSchemaType } from '@fastgpt/global/core/dataset/type';
import { authChatCrud, authCollectionInChat } from '@/service/support/permission/auth/chat';
import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { authDatasetData } from '@fastgpt/service/support/permission/dataset/auth';
import { CollectionWithDatasetType } from '@fastgpt/global/core/dataset/type';
import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';
import { ApiRequestProps } from '@fastgpt/service/type/next';
import { MongoDatasetCollection } from '@fastgpt/service/core/dataset/collection/schema';
import { ChatErrEnum } from '@fastgpt/global/common/error/code/chat';

export type GetQuoteDataResponse = {
collection: CollectionWithDatasetType;
collection: DatasetCollectionSchemaType;
q: string;
a: string;
};

async function handler(req: NextApiRequest): Promise<GetQuoteDataResponse> {
const { id: dataId } = req.query as {
id: string;
};
export type GetQuoteDataProps =
| {
id: string;
}
| ({
id: string;
appId: string;
chatId: string;
chatItemDataId: string;
} & OutLinkChatAuthProps);

// Credential check
const { datasetData, collection } = await authDatasetData({
req,
authToken: true,
authApiKey: true,
dataId,
per: ReadPermissionVal
});
async function handler(req: ApiRequestProps<GetQuoteDataProps>): Promise<GetQuoteDataResponse> {
const { id: dataId } = req.body;

// Auth
const { collection, q, a } = await (async () => {
if ('chatId' in req.body) {
const { appId, chatId, shareId, outLinkUid, teamId, teamToken, chatItemDataId } = req.body;
await authChatCrud({
req,
authToken: true,
appId,
chatId,
shareId,
outLinkUid,
teamId,
teamToken
});

const datasetData = await MongoDatasetData.findById(dataId);
if (!datasetData) {
return Promise.reject('Can not find the data');
}

const [collection, { responseDetail }] = await Promise.all([
MongoDatasetCollection.findById(datasetData.collectionId).lean(),
authChatCrud({
req,
authToken: true,
appId,
chatId,
shareId,
outLinkUid,
teamId,
teamToken
}),
authCollectionInChat({
appId,
chatId,
chatItemDataId,
collectionIds: [String(datasetData.collectionId)]
})
]);
if (!collection) {
return Promise.reject('Can not find the collection');
}
if (!responseDetail) {
return Promise.reject(ChatErrEnum.unAuthChat);
}

return {
collection,
q: datasetData.q,
a: datasetData.a
};
} else {
const { datasetData, collection } = await authDatasetData({
req,
authToken: true,
authApiKey: true,
dataId,
per: ReadPermissionVal
});
return {
collection,
q: datasetData.q,
a: datasetData.a
};
}
})();

return {
collection,
q: datasetData.q,
a: datasetData.a
q,
a
};
}
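
Based on the GetQuoteDataProps union above, the rewritten endpoint now accepts two POST body shapes: a plain id for users with read permission on the dataset, and an id plus chat identifiers (with the outlink/team auth fields where relevant) for chat viewers, who are additionally checked against the chat item's quote list and the chat's responseDetail setting. A sketch of the two shapes; all ids are placeholders:

```ts
// Dataset member: authorized through authDatasetData with ReadPermissionVal.
const datasetBody = { id: '675934a198f46329dfc6d05a' };

// Chat viewer (including share links): authorized through authChatCrud and
// authCollectionInChat, and rejected with unAuthChat when responseDetail is off.
const chatBody = {
  id: '675934a198f46329dfc6d05a',
  appId: '<appId>',
  chatId: '<chatId>',
  chatItemDataId: '<chatItemDataId>',
  // OutLinkChatAuthProps fields, only needed for share / team-token chats:
  shareId: '<shareId>',
  outLinkUid: '<outLinkUid>'
};
```
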
@@ -236,7 +236,7 @@ export const authCollectionInChat = async ({
.flat()
);

if (collectionIds.every((id) => quoteListSet.has(id))) {
if (collectionIds.every((id) => quoteListSet.has(String(id)))) {
return {
chatItem
};
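
The String(id) wrapper matters because Set membership uses strict equality: a Mongo ObjectId never equals the hex string stored in the quote list, even when both represent the same id. A minimal sketch of the failure mode (assuming mongoose ObjectIds; the id value is a placeholder):

```ts
import { Types } from 'mongoose';

const quoteListSet = new Set(['675934a198f46329dfc6d05a']);
const collectionId = new Types.ObjectId('675934a198f46329dfc6d05a');

quoteListSet.has(collectionId as any); // false: an ObjectId is not the string
quoteListSet.has(String(collectionId)); // true: String() yields the 24-char hex form
```
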
@@ -72,6 +72,7 @@ import type {
getTrainingErrorResponse
} from '@/pages/api/core/dataset/training/getTrainingError';
import type { APIFileItem } from '@fastgpt/global/core/dataset/apiDataset';
import { GetQuoteDataProps } from '@/pages/api/core/chat/quote/getQuote';

/* ======================== dataset ======================= */
export const getDatasets = (data: GetDatasetListBody) =>
@@ -216,8 +217,8 @@ export const delOneDatasetDataById = (id: string) =>
DELETE<string>(`/core/dataset/data/delete`, { id });

// Get quote data
export const getQuoteData = (id: string) =>
GET<GetQuoteDataResponse>(`/core/dataset/data/getQuoteData`, { id });
export const getQuoteData = (data: GetQuoteDataProps) =>
POST<GetQuoteDataResponse>(`/core/dataset/data/getQuoteData`, data);

/* ================ training ==================== */
export const postRebuildEmbedding = (data: rebuildEmbeddingBody) =>