Release update (#1580)

* release doc

* fix: regex match

* perf: tool call arg

* fix: stream update variables

* remove status

* update prompt

* rename embedding model
This commit is contained in:
Archer
2024-05-24 11:07:03 +08:00
committed by GitHub
parent 92a3d6d268
commit 9c7c74050b
23 changed files with 119 additions and 93 deletions

View File

@@ -72,7 +72,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const { user } = await getUserChatInfoAndAuthTeamPoints(tmbId);
/* start process */
const { flowResponses, flowUsages, newVariables } = await dispatchWorkFlow({
const { flowResponses, flowUsages } = await dispatchWorkFlow({
res,
mode: 'test',
teamId,
@@ -99,11 +99,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
event: SseResponseEventEnum.flowResponses,
data: JSON.stringify(flowResponses)
});
responseWrite({
res,
event: SseResponseEventEnum.updateVariables,
data: JSON.stringify(newVariables)
});
res.end();
pushChatUsage({

View File

@@ -292,11 +292,6 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
});
if (detail) {
responseWrite({
res,
event: SseResponseEventEnum.updateVariables,
data: JSON.stringify(newVariables)
});
if (responseDetail) {
responseWrite({
res,
@@ -315,7 +310,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
return assistantResponses;
})();
res.json({
...(detail ? { responseData: feResponseData } : {}),
...(detail ? { responseData: feResponseData, newVariables } : {}),
id: chatId || '',
model: '',
usage: { prompt_tokens: 1, completion_tokens: 1, total_tokens: 1 },

View File

@@ -74,7 +74,7 @@ const Chat = ({ appId, chatId }: { appId: string; chatId: string }) => {
const prompts = messages.slice(-2);
const completionChatId = chatId ? chatId : nanoid();
const { responseText, responseData, newVariables } = await streamFetch({
const { responseText, responseData } = await streamFetch({
data: {
messages: prompts,
variables,
@@ -123,7 +123,7 @@ const Chat = ({ appId, chatId }: { appId: string; chatId: string }) => {
history: ChatBoxRef.current?.getChatHistories() || state.history
}));
return { responseText, responseData, isNewChat: forbidRefresh.current, newVariables };
return { responseText, responseData, isNewChat: forbidRefresh.current };
},
[appId, chatId, histories, pushHistory, router, setChatData, updateHistory]
);

View File

@@ -95,7 +95,7 @@ const OutLink = ({
'*'
);
const { responseText, responseData, newVariables } = await streamFetch({
const { responseText, responseData } = await streamFetch({
data: {
messages: prompts,
variables: {
@@ -169,7 +169,7 @@ const OutLink = ({
'*'
);
return { responseText, responseData, isNewChat: forbidRefresh.current, newVariables };
return { responseText, responseData, isNewChat: forbidRefresh.current };
},
[
chatId,

View File

@@ -21,10 +21,7 @@ import ChatHistorySlider from './components/ChatHistorySlider';
import ChatHeader from './components/ChatHeader';
import { serviceSideProps } from '@/web/common/utils/i18n';
import { useTranslation } from 'next-i18next';
import {
checkChatSupportSelectFileByChatModels,
getAppQuestionGuidesByUserGuideModule
} from '@/web/core/chat/utils';
import { checkChatSupportSelectFileByChatModels } from '@/web/core/chat/utils';
import { useChatStore } from '@/web/core/chat/storeChat';
import { customAlphabet } from 'nanoid';
const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 12);
@@ -38,8 +35,6 @@ import { getErrText } from '@fastgpt/global/common/error/utils';
import MyBox from '@fastgpt/web/components/common/MyBox';
import SliderApps from './components/SliderApps';
import { GPTMessages2Chats } from '@fastgpt/global/core/chat/adapt';
import { StoreNodeItemType } from '@fastgpt/global/core/workflow/type';
import { getAppQGuideCustomURL } from '@/web/core/app/utils';
const OutLink = () => {
const { t } = useTranslation();
@@ -84,7 +79,7 @@ const OutLink = () => {
const prompts = messages.slice(-2);
const completionChatId = chatId ? chatId : nanoid();
const { responseText, responseData, newVariables } = await streamFetch({
const { responseText, responseData } = await streamFetch({
data: {
messages: prompts,
variables: {
@@ -140,9 +135,20 @@ const OutLink = () => {
history: ChatBoxRef.current?.getChatHistories() || state.history
}));
return { responseText, responseData, isNewChat: forbidRefresh.current, newVariables };
return { responseText, responseData, isNewChat: forbidRefresh.current };
},
[appId, teamToken, chatId, histories, pushHistory, router, setChatData, teamId, updateHistory]
[
chatId,
customVariables,
appId,
teamId,
teamToken,
setChatData,
pushHistory,
router,
histories,
updateHistory
]
);
/* replace router query to last chat */