diff --git a/src/pages/api/chat/vectorGpt.ts b/src/pages/api/chat/vectorGpt.ts
index b97d22a66..1507aa13b 100644
--- a/src/pages/api/chat/vectorGpt.ts
+++ b/src/pages/api/chat/vectorGpt.ts
@@ -119,20 +119,20 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
           .select('text q')
           .then((res) => {
             if (!res) return '';
-            const questions = res.q.map((item) => item.text).join(' ');
+            // const questions = res.q.map((item) => item.text).join(' ');
             const answer = res.text;
-            return `${questions} ${answer}`;
+            return `${answer}`;
           });
       })
     )
   ).filter((item) => item);
 
   // filter textArr, at most 3000 tokens
-  const systemPrompt = systemPromptFilter(textArr, 2800);
+  const systemPrompt = systemPromptFilter(textArr, 3400);
 
   prompts.unshift({
     obj: 'SYSTEM',
-    value: `根据下面的知识回答问题: ${systemPrompt}`
+    value: `${model.systemPrompt}。 我的知识库: "${systemPrompt}"`
   });
 
   // keep the total token count under the limit
diff --git a/src/pages/chat/index.tsx b/src/pages/chat/index.tsx
index f2cef7b0e..b432e6978 100644
--- a/src/pages/chat/index.tsx
+++ b/src/pages/chat/index.tsx
@@ -190,91 +190,97 @@ const Chat = ({ chatId }: { chatId: string }) => {
   /**
    * Send a message
    */
-  const sendPrompt = useCallback(async () => {
-    const storeInput = inputVal;
-    // remove empty lines
-    const val = inputVal
-      .trim()
-      .split('\n')
-      .filter((val) => val)
-      .join('\n');
-    if (!chatData?.modelId || !val || !ChatBox.current || isChatting) {
-      return;
-    }
+  const sendPrompt = useCallback(
+    async (e?: React.MouseEvent) => {
+      e?.stopPropagation();
+      e?.preventDefault();
 
-    // length check
-    const tokens = encode(val).length;
-    const model = modelList.find((item) => item.model === chatData.modelName);
-
-    if (model && tokens >= model.maxToken) {
-      toast({
-        title: '单次输入超出 4000 tokens',
-        status: 'warning'
-      });
-      return;
-    }
-
-    const newChatList: ChatSiteItemType[] = [
-      ...chatData.history,
-      {
-        obj: 'Human',
-        value: val,
-        status: 'finish'
-      },
-      {
-        obj: 'AI',
-        value: '',
-        status: 'loading'
+      const storeInput = inputVal;
+      // remove empty lines
+      const val = inputVal
+        .trim()
+        .split('\n')
+        .filter((val) => val)
+        .join('\n');
+      if (!chatData?.modelId || !val || !ChatBox.current || isChatting) {
+        return;
       }
-    ];
-    // insert the new messages
-    setChatData((state) => ({
-      ...state,
-      history: newChatList
-    }));
+      // length check
+      const tokens = encode(val).length;
+      const model = modelList.find((item) => item.model === chatData.modelName);
 
-    // clear the input
-    resetInputVal('');
-    scrollToBottom();
-
-    try {
-      await gptChatPrompt(newChatList[newChatList.length - 2]);
-
-      // if this is the first Human message, push it into the chat history
-      const humanChat = newChatList.filter((item) => item.obj === 'Human');
-      if (humanChat.length === 1) {
-        pushChatHistory({
-          chatId,
-          title: humanChat[0].value
+      if (model && tokens >= model.maxToken) {
+        toast({
+          title: '单次输入超出 4000 tokens',
+          status: 'warning'
         });
+        return;
       }
-    } catch (err: any) {
-      toast({
-        title: typeof err === 'string' ? err : err?.message || '聊天出错了~',
-        status: 'warning',
-        duration: 5000,
-        isClosable: true
-      });
-      resetInputVal(storeInput);
+      const newChatList: ChatSiteItemType[] = [
+        ...chatData.history,
+        {
+          obj: 'Human',
+          value: val,
+          status: 'finish'
+        },
+        {
+          obj: 'AI',
+          value: '',
+          status: 'loading'
+        }
+      ];
+      // insert the new messages
       setChatData((state) => ({
         ...state,
-        history: newChatList.slice(0, newChatList.length - 2)
+        history: newChatList
       }));
-    }
-  }, [
-    inputVal,
-    chatData,
-    isChatting,
-    resetInputVal,
-    scrollToBottom,
-    toast,
-    gptChatPrompt,
-    pushChatHistory,
-    chatId
-  ]);
+
+      // clear the input
+      resetInputVal('');
+      scrollToBottom();
+
+      try {
+        await gptChatPrompt(newChatList[newChatList.length - 2]);
+
+        // if this is the first Human message, push it into the chat history
+        const humanChat = newChatList.filter((item) => item.obj === 'Human');
+        if (humanChat.length === 1) {
+          pushChatHistory({
+            chatId,
+            title: humanChat[0].value
+          });
+        }
+      } catch (err: any) {
+        toast({
+          title: typeof err === 'string' ? err : err?.message || '聊天出错了~',
+          status: 'warning',
+          duration: 5000,
+          isClosable: true
+        });
+
+        resetInputVal(storeInput);
+
+        setChatData((state) => ({
+          ...state,
+          history: newChatList.slice(0, newChatList.length - 2)
+        }));
+      }
+    },
+    [
+      inputVal,
+      chatData,
+      isChatting,
+      resetInputVal,
+      scrollToBottom,
+      toast,
+      gptChatPrompt,
+      pushChatHistory,
+      chatId
+    ]
+  );
 
   // delete a message
   const delChatRecord = useCallback(
diff --git a/src/pages/model/detail/components/InputDataModal.tsx b/src/pages/model/detail/components/InputDataModal.tsx
index 43c7f5d46..bb0757320 100644
--- a/src/pages/model/detail/components/InputDataModal.tsx
+++ b/src/pages/model/detail/components/InputDataModal.tsx
@@ -19,7 +19,7 @@ import { DeleteIcon } from '@chakra-ui/icons';
 import { customAlphabet } from 'nanoid';
 
 const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 12);
-type FormData = { text: string; q: { val: string }[] };
+type FormData = { text: string; q: string };
 
 const InputDataModal = ({
   onClose,
@@ -36,17 +36,9 @@ const InputDataModal = ({
   const { register, handleSubmit, control } = useForm({
     defaultValues: {
       text: '',
-      q: [{ val: '' }]
+      q: ''
     }
   });
-  const {
-    fields: inputQ,
-    append: appendQ,
-    remove: removeQ
-  } = useFieldArray({
-    control,
-    name: 'q'
-  });
 
   const sureImportData = useCallback(
     async (e: FormData) => {
@@ -58,10 +50,12 @@ const InputDataModal = ({
         data: [
           {
             text: e.text,
-            q: e.q.map((item) => ({
-              id: nanoid(),
-              text: item.val
-            }))
+            q: [
+              {
+                id: nanoid(),
+                text: e.q
+              }
+            ]
           }
         ]
       });
@@ -81,50 +75,46 @@ const InputDataModal = ({
   );
 
   return (
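For context on the first hunk above: `systemPromptFilter(textArr, 3400)` trims the retrieved knowledge-base entries down to a token budget before they are prepended as the SYSTEM prompt. Its implementation is not part of this diff, so the following is only a minimal sketch of what such a filter might look like; `countTokens` is a stand-in for whatever tokenizer the project actually uses (presumably the same `encode()` helper seen in `chat/index.tsx`).

```ts
// Hypothetical sketch of a token-budget filter; not the repository's actual implementation.
// countTokens is a rough placeholder for a real tokenizer, e.g. the encode() helper
// imported in chat/index.tsx.
const countTokens = (s: string): number => Math.ceil(s.length / 2);

export function systemPromptFilter(texts: string[], maxTokens: number): string {
  const kept: string[] = [];
  let used = 0;

  for (const text of texts) {
    const cost = countTokens(text);
    // stop once the next entry would push the prompt past the budget
    if (used + cost > maxTokens) break;
    kept.push(text);
    used += cost;
  }

  return kept.join('\n');
}
```

Under that reading, each knowledge-base entry now contributes only its answer text (the question list is commented out), and raising the budget from 2800 to 3400 lets more retrieved entries survive the cut before the SYSTEM message is assembled from `model.systemPrompt` plus the filtered knowledge.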