Mirror of https://github.com/labring/FastGPT.git (synced 2025-07-23 05:12:39 +00:00)
Release update (#1580)

* release doc
* fix: reg match
* perf: tool call arg
* fix: stream update variables
* remove status
* update prompt
* rename embedding model
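The heart of the `stream update variables` fix: variable updates now travel over the SSE stream as an `updateVariables` event and are applied on the client through the `generatingMessage` callback, rather than being buffered into a `newVariables` field on the HTTP result. A minimal sketch of the client-side path, assembled from the hunks below (the surrounding wiring is illustrative, not the exact FastGPT source):

    // streamFetch: forward the event to the caller instead of buffering it
    if (event === SseResponseEventEnum.updateVariables) {
      onMessage({ event, variables: parseJson });
    }

    // ChatBox generatingMessage: write the pushed variables into form state
    if (event === SseResponseEventEnum.updateVariables && variables) {
      setValue('variables', variables);
    }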
@@ -108,7 +108,7 @@
   },
   {
     "model": "text-embedding-ada-002",
-    "name": "Embedding-2",
+    "name": "text-embedding-ada-002",
     "avatar": "/imgs/model/openai.svg",
     "charsPointsPrice": 0,
     "defaultToken": 512,
@@ -59,6 +59,7 @@ import ChatItem from './components/ChatItem';
 import dynamic from 'next/dynamic';
 import { useCreation } from 'ahooks';
 import { AppChatConfigType } from '@fastgpt/global/core/app/type';
+import type { StreamResponseType } from '@/web/common/api/fetch';
 
 const ResponseTags = dynamic(() => import('./ResponseTags'));
 const FeedbackModal = dynamic(() => import('./FeedbackModal'));
@@ -90,12 +91,11 @@ type Props = OutLinkChatAuthProps & {
   chatId?: string;
 
   onUpdateVariable?: (e: Record<string, any>) => void;
-  onStartChat?: (e: StartChatFnProps) => Promise<{
-    responseText: string;
-    [DispatchNodeResponseKeyEnum.nodeResponse]: ChatHistoryItemResType[];
-    newVariables?: Record<string, any>;
-    isNewChat?: boolean;
-  }>;
+  onStartChat?: (e: StartChatFnProps) => Promise<
+    StreamResponseType & {
+      isNewChat?: boolean;
+    }
+  >;
   onDelMessage?: (e: { contentId: string }) => void;
 };
 
@@ -207,7 +207,8 @@ const ChatBox = (
       status,
       name,
       tool,
-      autoTTSResponse
+      autoTTSResponse,
+      variables
     }: generatingMessageProps & { autoTTSResponse?: boolean }) => {
       setChatHistories((state) =>
         state.map((item, index) => {
@@ -290,6 +291,8 @@ const ChatBox = (
               return val;
             })
           };
+        } else if (event === SseResponseEventEnum.updateVariables && variables) {
+          setValue('variables', variables);
         }
 
         return item;
@@ -297,7 +300,7 @@ const ChatBox = (
       );
       generatingScroll();
     },
-    [generatingScroll, setChatHistories, splitText2Audio]
+    [generatingScroll, setChatHistories, setValue, splitText2Audio]
   );
 
   // Reset the input content
@@ -466,7 +469,6 @@ const ChatBox = (
       const {
         responseData,
         responseText,
-        newVariables,
         isNewChat = false
       } = await onStartChat({
         chatList: newChatList,
@@ -476,8 +478,6 @@ const ChatBox = (
         variables: requestVariables
       });
 
-      newVariables && setValue('variables', newVariables);
-
       isNewChatReplace.current = isNewChat;
 
       // set finish status
@@ -561,7 +561,6 @@ const ChatBox = (
       resetInputVal,
       setAudioPlayingChatId,
       setChatHistories,
-      setValue,
       splitText2Audio,
       startSegmentedAudio,
       t,
@@ -696,7 +695,7 @@ const ChatBox = (
       } catch (error) {}
     };
   },
-    [appId, chatId, feedbackType, outLinkUid, shareId, teamId, teamToken]
+    [appId, chatId, feedbackType, outLinkUid, setChatHistories, shareId, teamId, teamToken]
   );
   const onCloseUserLike = useCallback(
     (chat: ChatSiteItemType) => {
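With `onStartChat` typed against the shared `StreamResponseType`, a caller returns exactly what `streamFetch` resolves; variable updates no longer ride on the return value. A minimal caller sketch, assuming `StartChatFnProps` carries the `generatingMessage` callback referenced in fetch.ts below; the endpoint and the field names on `e` are illustrative, not the exact FastGPT source:

    const onStartChat = async (e: StartChatFnProps) => {
      // streamFetch resolves to StreamResponseType: { responseText, responseData }
      const { responseText, responseData } = await streamFetch({
        url: '/api/v1/chat/completions', // illustrative endpoint
        data: { messages: e.messages, variables: e.variables }, // assumed fields
        onMessage: e.generatingMessage, // updateVariables events arrive here
        abortCtrl: e.controller // assumed field
      });
      return { responseText, responseData, isNewChat: false };
    };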
@@ -8,11 +8,12 @@ import {
 import { SseResponseEventEnum } from '@fastgpt/global/core/workflow/runtime/constants';
 
 export type generatingMessageProps = {
-  event: `${SseResponseEventEnum}`;
+  event: SseResponseEventEnum;
   text?: string;
   name?: string;
   status?: 'running' | 'finish';
   tool?: ToolModuleResponseItemType;
+  variables?: Record<string, any>;
 };
 
 export type UserInputFileItemType = {
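Since `event` is now the enum itself rather than a template-literal string type, handlers can compare directly against `SseResponseEventEnum` members. A sketch of a `generatingMessage` handler consuming the new `variables` field; both helpers are hypothetical stand-ins for the UI updates ChatBox performs:

    const generatingMessage = ({ event, text, variables }: generatingMessageProps) => {
      if (event === SseResponseEventEnum.answer && text) {
        appendAnswerText(text); // hypothetical: append streamed tokens to the chat item
      } else if (event === SseResponseEventEnum.updateVariables && variables) {
        applyVariables(variables); // hypothetical: mirrors setValue('variables', ...) in ChatBox
      }
    };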
@@ -62,7 +62,7 @@ const ChatTest = (
       const history = chatList.slice(-historyMaxLen - 2, -2);
 
       // Stream the request and collect the data
-      const { responseText, responseData, newVariables } = await streamFetch({
+      const { responseText, responseData } = await streamFetch({
         url: '/api/core/chat/chatTest',
         data: {
           history,
@@ -78,7 +78,7 @@ const ChatTest = (
         abortCtrl: controller
       });
 
-      return { responseText, responseData, newVariables };
+      return { responseText, responseData };
     },
     [appDetail._id, appDetail.name, edges, nodes]
   );
@@ -72,7 +72,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
   const { user } = await getUserChatInfoAndAuthTeamPoints(tmbId);
 
   /* start process */
-  const { flowResponses, flowUsages, newVariables } = await dispatchWorkFlow({
+  const { flowResponses, flowUsages } = await dispatchWorkFlow({
     res,
     mode: 'test',
     teamId,
@@ -99,11 +99,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
     event: SseResponseEventEnum.flowResponses,
     data: JSON.stringify(flowResponses)
   });
-  responseWrite({
-    res,
-    event: SseResponseEventEnum.updateVariables,
-    data: JSON.stringify(newVariables)
-  });
 
   res.end();
 
   pushChatUsage({
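`responseWrite` is the SSE helper these handlers call with a single options object. For orientation, a minimal writer matching the call shape seen in this diff; the real helper lives in FastGPT's service layer and is not reproduced here:

    import type { NextApiResponse } from 'next';

    // Illustrative re-implementation of the call shape used above.
    function responseWrite({ res, event, data }: { res: NextApiResponse; event?: string; data: string }) {
      // SSE framing: an optional event-name line, then a data line, then a blank line.
      if (event) res.write(`event: ${event}\n`);
      res.write(`data: ${data}\n\n`);
    }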
@@ -292,11 +292,6 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
       });
 
-      if (detail) {
-        responseWrite({
-          res,
-          event: SseResponseEventEnum.updateVariables,
-          data: JSON.stringify(newVariables)
-        });
+      if (responseDetail) {
         responseWrite({
           res,
@@ -315,7 +310,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
     return assistantResponses;
   })();
   res.json({
-    ...(detail ? { responseData: feResponseData } : {}),
+    ...(detail ? { responseData: feResponseData, newVariables } : {}),
     id: chatId || '',
     model: '',
     usage: { prompt_tokens: 1, completion_tokens: 1, total_tokens: 1 },
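For non-streaming detail responses, `newVariables` now arrives in the JSON body instead of a separate SSE write. A hedged consumer sketch, assuming the `res.json` shape above; both helpers are hypothetical stand-ins for the caller's own handling:

    const data = await response.json();
    if (data.newVariables) applyVariables(data.newVariables); // hypothetical variable setter
    if (data.responseData) renderResponseDetail(data.responseData); // hypothetical renderer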
@@ -74,7 +74,7 @@ const Chat = ({ appId, chatId }: { appId: string; chatId: string }) => {
       const prompts = messages.slice(-2);
       const completionChatId = chatId ? chatId : nanoid();
 
-      const { responseText, responseData, newVariables } = await streamFetch({
+      const { responseText, responseData } = await streamFetch({
         data: {
           messages: prompts,
           variables,
@@ -123,7 +123,7 @@ const Chat = ({ appId, chatId }: { appId: string; chatId: string }) => {
         history: ChatBoxRef.current?.getChatHistories() || state.history
       }));
 
-      return { responseText, responseData, isNewChat: forbidRefresh.current, newVariables };
+      return { responseText, responseData, isNewChat: forbidRefresh.current };
     },
     [appId, chatId, histories, pushHistory, router, setChatData, updateHistory]
   );
@@ -95,7 +95,7 @@ const OutLink = ({
         '*'
       );
 
-      const { responseText, responseData, newVariables } = await streamFetch({
+      const { responseText, responseData } = await streamFetch({
         data: {
           messages: prompts,
           variables: {
@@ -169,7 +169,7 @@ const OutLink = ({
         '*'
       );
 
-      return { responseText, responseData, isNewChat: forbidRefresh.current, newVariables };
+      return { responseText, responseData, isNewChat: forbidRefresh.current };
     },
     [
       chatId,
@@ -21,10 +21,7 @@ import ChatHistorySlider from './components/ChatHistorySlider';
 import ChatHeader from './components/ChatHeader';
 import { serviceSideProps } from '@/web/common/utils/i18n';
 import { useTranslation } from 'next-i18next';
-import {
-  checkChatSupportSelectFileByChatModels,
-  getAppQuestionGuidesByUserGuideModule
-} from '@/web/core/chat/utils';
+import { checkChatSupportSelectFileByChatModels } from '@/web/core/chat/utils';
 import { useChatStore } from '@/web/core/chat/storeChat';
 import { customAlphabet } from 'nanoid';
 const nanoid = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', 12);
@@ -38,8 +35,6 @@ import { getErrText } from '@fastgpt/global/common/error/utils';
 import MyBox from '@fastgpt/web/components/common/MyBox';
 import SliderApps from './components/SliderApps';
 import { GPTMessages2Chats } from '@fastgpt/global/core/chat/adapt';
-import { StoreNodeItemType } from '@fastgpt/global/core/workflow/type';
-import { getAppQGuideCustomURL } from '@/web/core/app/utils';
 
 const OutLink = () => {
   const { t } = useTranslation();
@@ -84,7 +79,7 @@ const OutLink = () => {
       const prompts = messages.slice(-2);
       const completionChatId = chatId ? chatId : nanoid();
 
-      const { responseText, responseData, newVariables } = await streamFetch({
+      const { responseText, responseData } = await streamFetch({
         data: {
           messages: prompts,
           variables: {
@@ -140,9 +135,20 @@ const OutLink = () => {
         history: ChatBoxRef.current?.getChatHistories() || state.history
       }));
 
-      return { responseText, responseData, isNewChat: forbidRefresh.current, newVariables };
+      return { responseText, responseData, isNewChat: forbidRefresh.current };
     },
-    [appId, teamToken, chatId, histories, pushHistory, router, setChatData, teamId, updateHistory]
+    [
+      chatId,
+      customVariables,
+      appId,
+      teamId,
+      teamToken,
+      setChatData,
+      pushHistory,
+      router,
+      histories,
+      updateHistory
+    ]
   );
 
   /* replace router query to last chat */
@@ -18,10 +18,9 @@ type StreamFetchProps = {
   onMessage: StartChatFnProps['generatingMessage'];
   abortCtrl: AbortController;
 };
-type StreamResponseType = {
+export type StreamResponseType = {
   responseText: string;
   [DispatchNodeResponseKeyEnum.nodeResponse]: ChatHistoryItemResType[];
-  newVariables: Record<string, any>;
 };
 class FatalError extends Error {}
@@ -50,7 +49,6 @@ export const streamFetch = ({
   )[] = [];
   let errMsg: string | undefined;
   let responseData: ChatHistoryItemResType[] = [];
-  let newVariables: Record<string, any> = {};
   let finished = false;
 
   const finish = () => {
@@ -58,7 +56,6 @@ export const streamFetch = ({
       return failedFinish();
     }
     return resolve({
-      newVariables,
       responseText,
       responseData
     });
@@ -71,7 +68,7 @@ export const streamFetch = ({
     });
   };
 
-  const isAnswerEvent = (event: `${SseResponseEventEnum}`) =>
+  const isAnswerEvent = (event: SseResponseEventEnum) =>
     event === SseResponseEventEnum.answer || event === SseResponseEventEnum.fastAnswer;
   // animate response to make it looks smooth
   function animateResponseText() {
@@ -200,7 +197,10 @@ export const streamFetch = ({
         } else if (event === SseResponseEventEnum.flowResponses && Array.isArray(parseJson)) {
           responseData = parseJson;
         } else if (event === SseResponseEventEnum.updateVariables) {
-          newVariables = parseJson;
+          onMessage({
+            event,
+            variables: parseJson
+          });
         } else if (event === SseResponseEventEnum.error) {
           if (parseJson.statusText === TeamErrEnum.aiPointsNotEnough) {
             useSystemStore.getState().setIsNotSufficientModal(true);
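With `newVariables` dropped from the resolved value, variable updates reach callers exclusively through `onMessage`. A minimal usage sketch combining the pieces above; the endpoint and payload mirror the ChatTest hunk, and `setValue` stands in for the caller's own state setter:

    const controller = new AbortController();
    const { responseText, responseData } = await streamFetch({
      url: '/api/core/chat/chatTest',
      data: { history, variables },
      abortCtrl: controller,
      onMessage: ({ event, variables: updated }) => {
        if (event === SseResponseEventEnum.updateVariables && updated) {
          setValue('variables', updated); // same handling ChatBox applies
        }
      }
    });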