Feat: Quote auth (#4715)

* fix outlink quote number auth (#4705)

* perf: quote auth

* feat: qwen3 config

---------

Co-authored-by: heheer <heheer@sealos.io>
Author: Archer
Date: 2025-04-29 12:05:04 +08:00 (committed by GitHub)
Parent: 26e320b0df
Commit: 5023da4489
16 changed files with 400 additions and 77 deletions

View File

@@ -10,7 +10,7 @@ weight: 792
## 🚀 New Features
-1. Support Streamable HTTP MCP mode: FastGPT can be exposed for MCP calls without installing a separate mcp_server.
+1. Qwen3 model presets
## ⚙️ Optimizations

View File

@@ -2,14 +2,14 @@
"provider": "Qwen", "provider": "Qwen",
"list": [ "list": [
{ {
"model": "qwen-turbo", "model": "qwen-vl-plus",
"name": "Qwen-turbo", "name": "qwen-vl-plus",
"maxContext": 128000, "maxContext": 32000,
"maxResponse": 8000, "maxResponse": 2000,
"quoteMaxToken": 100000, "quoteMaxToken": 20000,
"maxTemperature": 1, "maxTemperature": 1.2,
"vision": false, "vision": true,
"toolChoice": true, "toolChoice": false,
"functionCall": false, "functionCall": false,
"defaultSystemChatPrompt": "", "defaultSystemChatPrompt": "",
"datasetProcess": true, "datasetProcess": true,
@@ -19,12 +19,9 @@
"usedInQueryExtension": true, "usedInQueryExtension": true,
"customExtractPrompt": "", "customExtractPrompt": "",
"usedInToolCall": true, "usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {},
"type": "llm", "type": "llm",
"showTopP": true, "showTopP": true,
"showStopSign": true, "showStopSign": true
"responseFormatList": ["text", "json_object"]
}, },
{ {
"model": "qwen-plus", "model": "qwen-plus",
@@ -52,14 +49,14 @@
"responseFormatList": ["text", "json_object"] "responseFormatList": ["text", "json_object"]
}, },
{ {
"model": "qwen-vl-plus", "model": "qwen-turbo",
"name": "qwen-vl-plus", "name": "Qwen-turbo",
"maxContext": 32000, "maxContext": 128000,
"maxResponse": 2000, "maxResponse": 8000,
"quoteMaxToken": 20000, "quoteMaxToken": 100000,
"maxTemperature": 1.2, "maxTemperature": 1,
"vision": true, "vision": false,
"toolChoice": false, "toolChoice": true,
"functionCall": false, "functionCall": false,
"defaultSystemChatPrompt": "", "defaultSystemChatPrompt": "",
"datasetProcess": true, "datasetProcess": true,
@@ -69,10 +66,14 @@
"usedInQueryExtension": true, "usedInQueryExtension": true,
"customExtractPrompt": "", "customExtractPrompt": "",
"usedInToolCall": true, "usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {},
"type": "llm", "type": "llm",
"showTopP": true, "showTopP": true,
"showStopSign": true "showStopSign": true,
"responseFormatList": ["text", "json_object"]
}, },
{ {
"model": "qwen-max", "model": "qwen-max",
"name": "Qwen-max", "name": "Qwen-max",
@@ -122,6 +123,214 @@
"showTopP": true, "showTopP": true,
"showStopSign": true "showStopSign": true
}, },
{
"model": "qwen3-235b-a22b",
"name": "qwen3-235b-a22b",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"vision": false,
"reasoning": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
"customCQPrompt": "",
"usedInExtractFields": true,
"usedInQueryExtension": true,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwen3-32b",
"name": "qwen3-32b",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"vision": false,
"reasoning": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
"customCQPrompt": "",
"usedInExtractFields": true,
"usedInQueryExtension": true,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwen3-30b-a3b",
"name": "qwen3-30b-a3b",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"vision": false,
"reasoning": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
"customCQPrompt": "",
"usedInExtractFields": true,
"usedInQueryExtension": true,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwen3-14b",
"name": "qwen3-14b",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"vision": false,
"reasoning": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
"customCQPrompt": "",
"usedInExtractFields": true,
"usedInQueryExtension": true,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwen3-8b",
"name": "qwen3-8b",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"vision": false,
"reasoning": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
"customCQPrompt": "",
"usedInExtractFields": true,
"usedInQueryExtension": true,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwen3-4b",
"name": "qwen3-4b",
"maxContext": 128000,
"maxResponse": 8000,
"quoteMaxToken": 100000,
"maxTemperature": 1,
"vision": false,
"reasoning": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
"customCQPrompt": "",
"usedInExtractFields": true,
"usedInQueryExtension": true,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwen3-1.7b",
"name": "qwen3-1.7b",
"maxContext": 32000,
"maxResponse": 8000,
"quoteMaxToken": 30000,
"maxTemperature": 1,
"vision": false,
"reasoning": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
"customCQPrompt": "",
"usedInExtractFields": true,
"usedInQueryExtension": true,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwen3-0.6b",
"name": "qwen3-0.6b",
"maxContext": 32000,
"maxResponse": 8000,
"quoteMaxToken": 30000,
"maxTemperature": 1,
"vision": false,
"reasoning": true,
"toolChoice": true,
"functionCall": false,
"defaultSystemChatPrompt": "",
"datasetProcess": true,
"usedInClassify": true,
"customCQPrompt": "",
"usedInExtractFields": true,
"usedInQueryExtension": true,
"customExtractPrompt": "",
"usedInToolCall": true,
"defaultConfig": {},
"fieldMap": {},
"type": "llm",
"showTopP": true,
"showStopSign": true,
"responseFormatList": ["text", "json_object"]
},
{
"model": "qwq-plus",
"name": "qwq-plus",

View File

@@ -110,6 +110,12 @@ export const loadSystemModels = async (init = false) => {
provider: ModelProviderIdType;
list: SystemModelItemType[];
};
+const mergeObject = (obj1: any, obj2: any) => {
+if (!obj1 && !obj2) return undefined;
+const formatObj1 = typeof obj1 === 'object' ? obj1 : {};
+const formatObj2 = typeof obj2 === 'object' ? obj2 : {};
+return { ...formatObj1, ...formatObj2 };
+};
fileContent.list.forEach((fileModel) => {
const dbModel = dbModels.find((item) => item.model === fileModel.model);
@@ -117,6 +123,10 @@ export const loadSystemModels = async (init = false) => {
const modelData: any = {
...fileModel,
...dbModel?.metadata,
+// @ts-ignore
+defaultConfig: mergeObject(fileModel.defaultConfig, dbModel?.metadata?.defaultConfig),
+// @ts-ignore
+fieldMap: mergeObject(fileModel.fieldMap, dbModel?.metadata?.fieldMap),
provider: getModelProvider(dbModel?.metadata?.provider || fileContent.provider).id,
type: dbModel?.metadata?.type || fileModel.type,
isCustom: false
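
A minimal standalone sketch of how the `mergeObject` helper above combines the file preset with the DB override (the DB value wins per key; `undefined` when neither side is an object). Illustrative only; the inputs are hypothetical configs, not call sites from the repo:

```ts
// Same shape as the helper added above.
const mergeObject = (obj1: any, obj2: any) => {
  if (!obj1 && !obj2) return undefined;
  const formatObj1 = typeof obj1 === 'object' ? obj1 : {};
  const formatObj2 = typeof obj2 === 'object' ? obj2 : {};
  return { ...formatObj1, ...formatObj2 };
};

// The file preset supplies defaults; DB metadata overrides them key by key.
console.log(mergeObject({ temperature: 0.7 }, { temperature: 0.3, top_p: 0.9 }));
// => { temperature: 0.3, top_p: 0.9 }
console.log(mergeObject(undefined, undefined)); // => undefined
```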

View File

@@ -36,7 +36,7 @@
"Warning": "Warning", "Warning": "Warning",
"add_new": "Add New", "add_new": "Add New",
"add_new_param": "Add new param", "add_new_param": "Add new param",
"all_quotes": "Quote all", "all_quotes": "All quotes",
"app.templateMarket.templateTags.Image_generation": "Image generation", "app.templateMarket.templateTags.Image_generation": "Image generation",
"app.templateMarket.templateTags.Office_services": "Office Services", "app.templateMarket.templateTags.Office_services": "Office Services",
"app.templateMarket.templateTags.Roleplay": "role play", "app.templateMarket.templateTags.Roleplay": "role play",

View File

@@ -22,7 +22,7 @@ import { getCollectionSourceData } from '@fastgpt/global/core/dataset/collection
import Markdown from '.';
import { getSourceNameIcon } from '@fastgpt/global/core/dataset/utils';
-const A = ({ children, ...props }: any) => {
+const A = ({ children, chatAuthData, ...props }: any) => {
const { t } = useTranslation();
const { isOpen, onOpen, onClose } = useDisclosure();
@@ -52,7 +52,7 @@ const A = ({ children, ...props }: any) => {
data: quoteData,
loading,
runAsync: getQuoteDataById
-} = useRequest2(getQuoteData, {
+} = useRequest2((id: string) => getQuoteData({ id, ...chatAuthData }), {
manual: true
});
const sourceData = useMemo(
@@ -149,4 +149,4 @@ const A = ({ children, ...props }: any) => {
return <Link {...props}>{children}</Link>;
};
-export default A;
+export default React.memo(A);

View File

@@ -12,6 +12,8 @@ import dynamic from 'next/dynamic';
import { Box } from '@chakra-ui/react';
import { CodeClassNameEnum, mdTextFormat } from './utils';
+import { useCreation } from 'ahooks';
+import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';
const CodeLight = dynamic(() => import('./codeBlock/CodeLight'), { ssr: false });
const MermaidCodeBlock = dynamic(() => import('./img/MermaidCodeBlock'), { ssr: false });
@@ -31,6 +33,11 @@ type Props = {
showAnimation?: boolean;
isDisabled?: boolean;
forbidZhFormat?: boolean;
+chatAuthData?: {
+appId: string;
+chatId: string;
+chatItemDataId: string;
+} & OutLinkChatAuthProps;
};
const Markdown = (props: Props) => {
const source = props.source || '';
@@ -41,16 +48,21 @@ const Markdown = (props: Props) => {
return <Box whiteSpace={'pre-wrap'}>{source}</Box>;
};
-const MarkdownRender = ({ source = '', showAnimation, isDisabled, forbidZhFormat }: Props) => {
-const components = useMemo<any>(
-() => ({
+const MarkdownRender = ({
+source = '',
+showAnimation,
+isDisabled,
+forbidZhFormat,
+chatAuthData
+}: Props) => {
+const components = useCreation(() => {
+return {
img: Image,
pre: RewritePre,
code: Code,
-a: A
-}),
-[]
-);
+a: (props: any) => <A {...props} chatAuthData={chatAuthData} />
+};
+}, [chatAuthData]);
const formatSource = useMemo(() => {
if (showAnimation || forbidZhFormat) return source;
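
A hedged usage sketch of the extended `Markdown` props: a consumer forwards its chat auth context so quote links can also be resolved in outlink/team chats. The import path, component name, and field values are placeholders, not taken from the repo:

```tsx
import React from 'react';
import Markdown from '.'; // placeholder path; adjust to the actual component location

const QuoteAwareAnswer = ({ text }: { text: string }) => (
  <Markdown
    source={text}
    chatAuthData={{
      appId: 'appId',
      chatId: 'chatId',
      chatItemDataId: 'dataId',
      shareId: 'shareId',
      outLinkUid: 'uid'
    }}
  />
);

export default QuoteAwareAnswer;
```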

View File

@@ -33,8 +33,7 @@ export const mdTextFormat = (text: string) => {
// /([\u4e00-\u9fa5\u3000-\u303f])([a-zA-Z0-9])|([a-zA-Z0-9])([\u4e00-\u9fa5\u3000-\u303f])/g,
// '$1$3 $2$4'
// )
-// Handle [quote:id]-style references: convert [quote:675934a198f46329dfc6d05a] to [675934a198f46329dfc6d05a](QUOTE)
-.replace(/\[quote:?\s*([a-f0-9]{24})\](?!\()/gi, '[$1](QUOTE)')
+// Handle bare-id references: convert [675934a198f46329dfc6d05a] to [675934a198f46329dfc6d05a](QUOTE)
.replace(/\[([a-f0-9]{24})\](?!\()/g, '[$1](QUOTE)');
// Add a space around Chinese punctuation that follows a link
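
For reference, a small sketch of what the remaining replacement does (the `[quote:id]` variant above was dropped in this commit). The id is a made-up 24-character hex value:

```ts
// Bare [24-hex-id] citations become markdown links with the QUOTE pseudo-URL,
// which the custom <A> renderer intercepts later.
const toQuoteLinks = (text: string) =>
  text.replace(/\[([a-f0-9]{24})\](?!\()/g, '[$1](QUOTE)');

console.log(toQuoteLinks('See [675934a198f46329dfc6d05a] for the source chunk.'));
// => 'See [675934a198f46329dfc6d05a](QUOTE) for the source chunk.'
```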

View File

@@ -22,6 +22,7 @@ import { VariableInputEnum } from '@fastgpt/global/core/workflow/constants';
import { getChatResData } from '@/web/core/chat/api';
import { ChatItemContext } from '@/web/core/chat/context/chatItemContext';
import { ChatRecordContext } from '@/web/core/chat/context/chatRecordContext';
+import { useCreation } from 'ahooks';
export type ChatProviderProps = {
appId: string;
@@ -128,13 +129,17 @@ export const ChatBoxContext = createContext<useChatStoreType>({
const Provider = ({
appId,
chatId,
-outLinkAuthData = {},
+outLinkAuthData,
chatType = 'chat',
children,
...props
}: ChatProviderProps & {
children: React.ReactNode;
}) => {
+const formatOutLinkAuth = useCreation(() => {
+return outLinkAuthData || {};
+}, [outLinkAuthData]);
const welcomeText = useContextSelector(
ChatItemContext,
(v) => v.chatBoxData?.app?.chatConfig?.welcomeText ?? ''
@@ -187,7 +192,7 @@ const Provider = ({
} = useAudioPlay({
appId,
ttsConfig,
-...outLinkAuthData
+...formatOutLinkAuth
});
const autoTTSResponse =
@@ -209,7 +214,7 @@ const Provider = ({
appId: appId,
chatId: chatId,
dataId,
-...outLinkAuthData
+...formatOutLinkAuth
});
setChatRecords((state) =>
state.map((item) => (item.dataId === dataId ? { ...item, responseData: resData } : item))
@@ -217,7 +222,7 @@ const Provider = ({
return resData;
}
},
-[chatRecords, chatId, appId, outLinkAuthData, setChatRecords]
+[chatRecords, chatId, appId, formatOutLinkAuth, setChatRecords]
);
const value: useChatStoreType = {
...props,
@@ -243,7 +248,7 @@ const Provider = ({
chatInputGuide,
appId,
chatId,
-outLinkAuthData,
+outLinkAuthData: formatOutLinkAuth,
getHistoryResponseData,
chatType
};
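
Presumably the motivation for dropping the `= {}` default above: a default parameter allocates a fresh object on every render when the prop is undefined, so every dependency array that includes it keeps re-firing, while memoizing the fallback keeps one stable reference. A minimal sketch assuming the same `useCreation` from ahooks; the hook name here is made up:

```ts
import { useCreation } from 'ahooks';
import type { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';

// Stable empty-object fallback: recomputed only when the prop itself changes,
// instead of `outLinkAuthData = {}`, which yields a new reference each render.
export const useStableOutLinkAuth = (outLinkAuthData?: OutLinkChatAuthProps) =>
  useCreation(() => outLinkAuthData || {}, [outLinkAuthData]);
```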

View File

@@ -100,6 +100,7 @@ const AIContentCard = React.memo(function AIContentCard({
return (
<AIResponseBox
+chatItemDataId={dataId}
key={key}
value={value}
isLastResponseValue={isLastChild && i === chatValue.length - 1}

View File

@@ -35,6 +35,7 @@ const RenderOutput = () => {
const key = `${histories[1].dataId}-ai-${i}`;
return (
<AIResponseBox
+chatItemDataId={histories[1].dataId}
key={key}
value={value}
isLastResponseValue={true}

View File

@@ -31,6 +31,8 @@ import { SelectOptionsComponent, FormInputComponent } from './Interactive/Intera
import { extractDeepestInteractive } from '@fastgpt/global/core/workflow/runtime/utils';
import { useContextSelector } from 'use-context-selector';
import { ChatItemContext } from '@/web/core/chat/context/chatItemContext';
+import { ChatBoxContext } from '../ChatContainer/ChatBox/Provider';
+import { useCreation } from 'ahooks';
const accordionButtonStyle = {
w: 'auto',
@@ -87,24 +89,33 @@ const RenderResoningContent = React.memo(function RenderResoningContent({
});
const RenderText = React.memo(function RenderText({
showAnimation,
-text
+text,
+chatItemDataId
}: {
showAnimation: boolean;
text: string;
+chatItemDataId: string;
}) {
const isResponseDetail = useContextSelector(ChatItemContext, (v) => v.isResponseDetail);
+const appId = useContextSelector(ChatBoxContext, (v) => v.appId);
+const chatId = useContextSelector(ChatBoxContext, (v) => v.chatId);
+const outLinkAuthData = useContextSelector(ChatBoxContext, (v) => v.outLinkAuthData);
const source = useMemo(() => {
if (!text) return '';
// Remove quote references if not showing response detail
-return isResponseDetail ? text : text.replace(/\[[a-f0-9]{24}\]\(QUOTE\)/g, '');
+return isResponseDetail
+? text
+: text.replace(/\[([a-f0-9]{24})\]\(QUOTE\)/g, '').replace(/\[([a-f0-9]{24})\](?!\()/g, '');
}, [text, isResponseDetail]);
-// First empty line
-// if (!source && !isLastChild) return null;
-return <Markdown source={source} showAnimation={showAnimation} />;
+const chatAuthData = useCreation(() => {
+return { appId, chatId, chatItemDataId, ...outLinkAuthData };
+}, [appId, chatId, chatItemDataId, outLinkAuthData]);
+return <Markdown source={source} showAnimation={showAnimation} chatAuthData={chatAuthData} />;
});
const RenderTool = React.memo(
@@ -230,17 +241,23 @@ const RenderUserFormInteractive = React.memo(function RenderFormInput({
});
const AIResponseBox = ({
+chatItemDataId,
value,
isLastResponseValue,
isChatting
}: {
+chatItemDataId: string;
value: UserChatItemValueItemType | AIChatItemValueItemType;
isLastResponseValue: boolean;
isChatting: boolean;
}) => {
if (value.type === ChatItemValueTypeEnum.text && value.text) {
return (
-<RenderText showAnimation={isChatting && isLastResponseValue} text={value.text.content} />
+<RenderText
+chatItemDataId={chatItemDataId}
+showAnimation={isChatting && isLastResponseValue}
+text={value.text.content}
+/>
);
}
if (value.type === ChatItemValueTypeEnum.reasoning && value.reasoning) {
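
A small sketch of the stripping `RenderText` now performs when quote detail is hidden: both linked and bare citation ids are removed from the streamed text (the sample string and id are made up):

```ts
const stripQuoteRefs = (text: string) =>
  text
    .replace(/\[([a-f0-9]{24})\]\(QUOTE\)/g, '')
    .replace(/\[([a-f0-9]{24})\](?!\()/g, '');

console.log(stripQuoteRefs('Answer [675934a198f46329dfc6d05a](QUOTE) end.'));
// => 'Answer  end.'
```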

View File

@@ -143,6 +143,7 @@ export const ModelEditModal = ({
data[key] = '';
}
}
+
return putSystemModel({
model: data.model,
metadata: data
@@ -490,7 +491,7 @@ export const ModelEditModal = ({
value={JSON.stringify(getValues('defaultConfig'), null, 2)}
onChange={(e) => {
if (!e) {
-setValue('defaultConfig', {});
+setValue('defaultConfig', undefined);
return;
}
try {
@@ -582,12 +583,6 @@ export const ModelEditModal = ({
</Td>
</Tr>
<Tr>
-<Td>
-<HStack spacing={1}>
-<Box>{t('account:model.function_call')}</Box>
-<QuestionTip label={t('account:model.function_call_tip')} />
-</HStack>
-</Td>
<Td textAlign={'right'}>
<Flex justifyContent={'flex-end'}>
<Switch {...register('functionCall')} />
@@ -720,12 +715,13 @@ export const ModelEditModal = ({
value={JSON.stringify(getValues('defaultConfig'), null, 2)}
resize
onChange={(e) => {
-console.log(e, '===');
if (!e) {
-setValue('defaultConfig', {});
+setValue('defaultConfig', undefined);
return;
}
try {
-setValue('defaultConfig', JSON.parse(e));
+setValue('defaultConfig', JSON.parse(e.trim()));
} catch (error) {
console.error(error);
}

View File

@@ -36,7 +36,7 @@ async function handler(req: ApiRequestProps<GetQuoteDataProps>): Promise<GetQuot
datasetDataIdList
} = req.body;
-const [chat, { chatItem }] = await Promise.all([
+const [{ chat, responseDetail }, { chatItem }] = await Promise.all([
authChatCrud({
req,
authToken: true,
@@ -49,7 +49,7 @@ async function handler(req: ApiRequestProps<GetQuoteDataProps>): Promise<GetQuot
}),
authCollectionInChat({ appId, chatId, chatItemDataId, collectionIds: collectionIdList })
]);
-if (!chat) return Promise.reject(ChatErrEnum.unAuthChat);
+if (!chat || !responseDetail) return Promise.reject(ChatErrEnum.unAuthChat);
const list = await MongoDatasetData.find(
{ _id: { $in: datasetDataIdList }, collectionId: { $in: collectionIdList } },

View File

@@ -1,33 +1,105 @@
-import type { NextApiRequest } from 'next';
import { NextAPI } from '@/service/middleware/entry';
+import { DatasetCollectionSchemaType } from '@fastgpt/global/core/dataset/type';
+import { authChatCrud, authCollectionInChat } from '@/service/support/permission/auth/chat';
+import { MongoDatasetData } from '@fastgpt/service/core/dataset/data/schema';
import { ReadPermissionVal } from '@fastgpt/global/support/permission/constant';
import { authDatasetData } from '@fastgpt/service/support/permission/dataset/auth';
-import { CollectionWithDatasetType } from '@fastgpt/global/core/dataset/type';
+import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';
+import { ApiRequestProps } from '@fastgpt/service/type/next';
+import { MongoDatasetCollection } from '@fastgpt/service/core/dataset/collection/schema';
+import { ChatErrEnum } from '@fastgpt/global/common/error/code/chat';
export type GetQuoteDataResponse = {
-collection: CollectionWithDatasetType;
+collection: DatasetCollectionSchemaType;
q: string;
a: string;
};
-async function handler(req: NextApiRequest): Promise<GetQuoteDataResponse> {
-const { id: dataId } = req.query as {
-id: string;
-};
-// Credential check
-const { datasetData, collection } = await authDatasetData({
-req,
-authToken: true,
-authApiKey: true,
-dataId,
-per: ReadPermissionVal
-});
+export type GetQuoteDataProps =
+| {
+id: string;
+}
+| ({
+id: string;
+appId: string;
+chatId: string;
+chatItemDataId: string;
+} & OutLinkChatAuthProps);
+async function handler(req: ApiRequestProps<GetQuoteDataProps>): Promise<GetQuoteDataResponse> {
+const { id: dataId } = req.body;
+// Auth
+const { collection, q, a } = await (async () => {
+if ('chatId' in req.body) {
+const { appId, chatId, shareId, outLinkUid, teamId, teamToken, chatItemDataId } = req.body;
+await authChatCrud({
+req,
+authToken: true,
+appId,
+chatId,
+shareId,
+outLinkUid,
+teamId,
+teamToken
+});
+const datasetData = await MongoDatasetData.findById(dataId);
+if (!datasetData) {
+return Promise.reject('Can not find the data');
+}
+const [collection, { responseDetail }] = await Promise.all([
+MongoDatasetCollection.findById(datasetData.collectionId).lean(),
+authChatCrud({
+req,
+authToken: true,
+appId,
+chatId,
+shareId,
+outLinkUid,
+teamId,
+teamToken
+}),
+authCollectionInChat({
+appId,
+chatId,
+chatItemDataId,
+collectionIds: [String(datasetData.collectionId)]
+})
+]);
+if (!collection) {
+return Promise.reject('Can not find the collection');
+}
+if (!responseDetail) {
+return Promise.reject(ChatErrEnum.unAuthChat);
+}
+return {
+collection,
+q: datasetData.q,
+a: datasetData.a
+};
+} else {
+const { datasetData, collection } = await authDatasetData({
+req,
+authToken: true,
+authApiKey: true,
+dataId,
+per: ReadPermissionVal
+});
+return {
+collection,
+q: datasetData.q,
+a: datasetData.a
+};
+}
+})();
return {
collection,
-q: datasetData.q,
-a: datasetData.a
+q,
+a
};
}

View File

@@ -236,7 +236,7 @@ export const authCollectionInChat = async ({
.flat()
);
-if (collectionIds.every((id) => quoteListSet.has(id))) {
+if (collectionIds.every((id) => quoteListSet.has(String(id)))) {
return {
chatItem
};
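
Why the `String(id)` cast above matters, assuming `collectionIds` carries Mongoose ObjectId instances while the set holds plain strings (the cast suggests as much). A minimal sketch:

```ts
import { Types } from 'mongoose';

const quoteListSet = new Set(['675934a198f46329dfc6d05a']); // ids collected as strings
const collectionId = new Types.ObjectId('675934a198f46329dfc6d05a');

// Set#has uses SameValueZero, so an ObjectId never equals a string entry.
console.log(quoteListSet.has(collectionId as any)); // false
console.log(quoteListSet.has(String(collectionId))); // true
```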

View File

@@ -72,6 +72,7 @@ import type {
getTrainingErrorResponse
} from '@/pages/api/core/dataset/training/getTrainingError';
import type { APIFileItem } from '@fastgpt/global/core/dataset/apiDataset';
+import { GetQuoteDataProps } from '@/pages/api/core/chat/quote/getQuote';
/* ======================== dataset ======================= */
export const getDatasets = (data: GetDatasetListBody) =>
@@ -216,8 +217,8 @@ export const delOneDatasetDataById = (id: string) =>
DELETE<string>(`/core/dataset/data/delete`, { id });
// Get quote data
-export const getQuoteData = (id: string) =>
-GET<GetQuoteDataResponse>(`/core/dataset/data/getQuoteData`, { id });
+export const getQuoteData = (data: GetQuoteDataProps) =>
+POST<GetQuoteDataResponse>(`/core/dataset/data/getQuoteData`, data);
/* ================ training ==================== */
export const postRebuildEmbedding = (data: rebuildEmbeddingBody) =>
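
A hedged sketch of the two call shapes the reworked `getQuoteData` client accepts; the import path and all field values are placeholders:

```ts
import { getQuoteData } from '@/web/core/dataset/api'; // placeholder path

// From a shared (outlink) chat: auth fields ride along in the POST body so the
// server can run authChatCrud + authCollectionInChat before returning q/a.
const fromOutlinkChat = () =>
  getQuoteData({
    id: '675934a198f46329dfc6d05a',
    appId: 'appId',
    chatId: 'chatId',
    chatItemDataId: 'chatItemDataId',
    shareId: 'shareId',
    outLinkUid: 'uid'
  });

// From the dataset detail view: the plain { id } form still goes through authDatasetData.
const fromDatasetDetail = () => getQuoteData({ id: '675934a198f46329dfc6d05a' });
```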