perf: response tag; feat: history quote

Author: archer
Date:   2023-08-15 09:55:00 +08:00
Parent: b8a65e1742
Commit: cc57a7e27e

9 changed files with 39 additions and 32 deletions

View File

@@ -51,7 +51,7 @@ const ResponseTags = ({
     bg: 'transparent'
   };
-  return (
+  return responseData.length === 0 ? null : (
     <Flex alignItems={'center'} mt={2} flexWrap={'wrap'}>
       {quoteList.length > 0 && (
         <MyTooltip label="查看引用">

View File

@@ -54,6 +54,7 @@ import styles from './index.module.scss';
 const textareaMinH = '22px';
 type generatingMessageProps = { text?: string; name?: string; status?: 'running' | 'finish' };
 export type StartChatFnProps = {
+  chatList: ChatSiteItemType[];
   messages: MessageItemType[];
   controller: AbortController;
   variables: Record<string, any>;
@@ -311,6 +312,7 @@ const ChatBox = (
   const messages = adaptChatItem_openAI({ messages: newChatList, reserveId: true });
   const { responseData } = await onStartChat({
+    chatList: newChatList,
     messages,
     controller: abortSignal,
     generatingMessage,
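
With the StartChatFnProps change above, onStartChat now receives both the raw chat items (chatList) and their OpenAI-adapted form (messages), so each consumer can pick whichever shape it needs. A minimal sketch of the idea, with ChatSiteItemType and MessageItemType reduced to stand-in types and the adapter renamed hypothetically (the real one is adaptChatItem_openAI):

type ChatSiteItemType = { _id: string; obj: 'Human' | 'AI'; value: string };
type MessageItemType = { role: 'user' | 'assistant'; content: string };

// Hypothetical adapter mirroring adaptChatItem_openAI: internal items -> OpenAI messages.
function adaptChatItemOpenAI(items: ChatSiteItemType[]): MessageItemType[] {
  return items.map((item) => ({
    role: item.obj === 'Human' ? 'user' : 'assistant',
    content: item.value
  }));
}

const newChatList: ChatSiteItemType[] = [
  { _id: '1', obj: 'Human', value: 'hello' },
  { _id: '2', obj: 'AI', value: '' } // placeholder the stream will fill
];

// Both shapes travel together, as in the updated onStartChat call.
const payload = { chatList: newChatList, messages: adaptChatItemOpenAI(newChatList) };
console.log(payload.messages.length); // 2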

View File

@@ -26,6 +26,12 @@ const PayRecordTable = () => {
   const [payOrders, setPayOrders] = useState<PaySchema[]>([]);
   const { toast } = useToast();
+  const { isInitialLoading, refetch } = useQuery(['initPayOrder'], getPayOrders, {
+    onSuccess(res) {
+      setPayOrders(res);
+    }
+  });
   const handleRefreshPayOrder = useCallback(
     async (payId: string) => {
       setIsLoading(true);
@@ -36,8 +42,6 @@ const PayRecordTable = () => {
           title: data,
           status: 'success'
         });
-        const res = await getPayOrders();
-        setPayOrders(res);
       } catch (error: any) {
         toast({
           title: error?.message,
@@ -45,18 +49,15 @@ const PayRecordTable = () => {
         });
         console.log(error);
       }
+      try {
+        refetch();
+      } catch (error) {}
       setIsLoading(false);
     },
-    [setIsLoading, toast]
+    [refetch, setIsLoading, toast]
   );
-  const { isInitialLoading } = useQuery(['initPayOrder'], getPayOrders, {
-    onSuccess(res) {
-      setPayOrders(res);
-    }
-  });
   return (
     <Box position={'relative'} h={'100%'}>
       {!isInitialLoading && payOrders.length === 0 ? (
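
The refactor above moves the list fetch into a single useQuery and reuses its refetch inside the refresh callback, instead of calling getPayOrders a second time by hand. A standalone sketch of the pattern, assuming @tanstack/react-query v4 (which still accepts the (key, fn, options) call form and exposes isInitialLoading):

import { useCallback, useState } from 'react';
import { useQuery } from '@tanstack/react-query';

type PaySchema = { _id: string; price: number };

// Stand-in for the real API call.
const getPayOrders = async (): Promise<PaySchema[]> => [];

function usePayOrders() {
  const [payOrders, setPayOrders] = useState<PaySchema[]>([]);
  // One query owns both the initial load and later refreshes;
  // onSuccess mirrors the result into local state.
  const { isInitialLoading, refetch } = useQuery(['initPayOrder'], getPayOrders, {
    onSuccess: (res) => setPayOrders(res)
  });
  // Actions that change server state just trigger refetch instead of re-fetching manually.
  const refresh = useCallback(async () => {
    try {
      await refetch();
    } catch (error) {
      // errors also surface through the query state; swallowed here as in the diff
    }
  }, [refetch]);
  return { payOrders, isInitialLoading, refresh };
}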

View File

@@ -4,16 +4,14 @@ import { authUser } from '@/service/utils/auth';
 import { sseErrRes } from '@/service/response';
 import { sseResponseEventEnum } from '@/constants/chat';
 import { sseResponse } from '@/service/utils/tools';
-import { type ChatCompletionRequestMessage } from 'openai';
 import { AppModuleItemType } from '@/types/app';
 import { dispatchModules } from '../openapi/v1/chat/completions';
-import { gptMessage2ChatType } from '@/utils/adapt';
 import { pushTaskBill } from '@/service/events/pushBill';
 import { BillSourceEnum } from '@/constants/user';
+import { ChatItemType } from '@/types/chat';
-export type MessageItemType = ChatCompletionRequestMessage & { _id?: string };
 export type Props = {
-  history: MessageItemType[];
+  history: ChatItemType[];
   prompt: string;
   modules: AppModuleItemType[];
   variables: Record<string, any>;
@@ -51,7 +49,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
       variables,
       user,
       params: {
-        history: gptMessage2ChatType(history),
+        history,
         userChatInput: prompt
       },
       stream: true,
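
With this change the client sends history already in the internal ChatItemType shape, so the gptMessage2ChatType conversion step disappears from the handler. A rough sketch of the two shapes involved, trimmed to the fields this commit touches (the real types carry more):

// OpenAI-style message the client used to send.
type MessageItemType = { role: string; content: string; _id?: string };

// Internal chat item now sent directly; responseData is what the
// history-quote feature reads downstream.
type ChatItemType = {
  obj: 'Human' | 'AI' | 'System';
  value: string;
  responseData?: { moduleName: string; quoteList?: { id: string }[] }[];
};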

View File

@@ -5,6 +5,7 @@ import { authUser } from '@/service/utils/auth';
 import { connectToDatabase, Chat } from '@/service/mongo';
 import { Types } from 'mongoose';
 import type { ChatItemType } from '@/types/chat';
+import { TaskResponseKeyEnum } from '@/constants/chat';
 export type Props = {
   chatId?: string;
@@ -55,10 +56,12 @@ export async function getChatHistory({
     {
       $project: {
         obj: '$content.obj',
-        value: '$content.value'
+        value: '$content.value',
+        [TaskResponseKeyEnum.responseData]: `$content.responseData`
       }
     }
   ]);
+  console.log(history);
   return { history };
 }
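
The $project stage now also surfaces the stored responseData, keyed through TaskResponseKeyEnum with a computed property name. A minimal sketch of the same trick, with a stand-in enum (the real one lives in @/constants/chat):

// Stand-in for TaskResponseKeyEnum.
enum TaskResponseKeyEnum {
  responseData = 'responseData'
}

// An aggregation projection is a plain object, so a computed key works as usual.
const projection = {
  $project: {
    obj: '$content.obj',
    value: '$content.value',
    [TaskResponseKeyEnum.responseData]: '$content.responseData'
  }
};
console.log(Object.keys(projection.$project)); // ['obj', 'value', 'responseData']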

View File

@@ -38,19 +38,19 @@ const ChatTest = (
   const isOpen = useMemo(() => modules && modules.length > 0, [modules]);
   const startChat = useCallback(
-    async ({ messages, controller, generatingMessage, variables }: StartChatFnProps) => {
+    async ({ chatList, controller, generatingMessage, variables }: StartChatFnProps) => {
       const historyMaxLen =
         modules
           ?.find((item) => item.flowType === FlowModuleTypeEnum.historyNode)
           ?.inputs?.find((item) => item.key === 'maxContext')?.value || 0;
-      const history = messages.slice(-historyMaxLen - 2, -2);
+      const history = chatList.slice(-historyMaxLen - 2, -2);
       // streaming request to fetch the data
       const { responseText, responseData } = await streamFetch({
         url: '/api/chat/chatTest',
         data: {
           history,
-          prompt: messages[messages.length - 2].content,
+          prompt: chatList[chatList.length - 2].value,
           modules,
           variables,
           appId: app._id,
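
The slice arithmetic here (repeated in the second ChatTest component below) relies on chatList ending with the just-appended user question followed by an empty AI placeholder: slice(-historyMaxLen - 2, -2) drops that trailing pair and keeps at most historyMaxLen earlier items as history, while chatList[chatList.length - 2] is the current question. A small worked example under that layout assumption:

const chatList = [
  { obj: 'Human', value: 'q1' },
  { obj: 'AI', value: 'a1' },
  { obj: 'Human', value: 'q2' },
  { obj: 'AI', value: 'a2' },
  { obj: 'Human', value: 'q3' }, // the question being asked now
  { obj: 'AI', value: '' } // placeholder the stream will fill
];
const historyMaxLen = 2;
// Keep up to 2 items before the trailing question/placeholder pair.
const history = chatList.slice(-historyMaxLen - 2, -2);
console.log(history.map((i) => i.value)); // ['q2', 'a2']
// The prompt is the second-to-last item: the current user question.
console.log(chatList[chatList.length - 2].value); // 'q3'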

View File

@@ -572,19 +572,19 @@ const ChatTest = ({ appId }: { appId: string }) => {
   const [modules, setModules] = useState<AppModuleItemType[]>([]);
   const startChat = useCallback(
-    async ({ messages, controller, generatingMessage, variables }: StartChatFnProps) => {
+    async ({ chatList, controller, generatingMessage, variables }: StartChatFnProps) => {
       const historyMaxLen =
         modules
           ?.find((item) => item.flowType === FlowModuleTypeEnum.historyNode)
           ?.inputs?.find((item) => item.key === 'maxContext')?.value || 0;
-      const history = messages.slice(-historyMaxLen - 2, -2);
+      const history = chatList.slice(-historyMaxLen - 2, -2);
       // streaming request to fetch the data
       const { responseText, responseData } = await streamFetch({
         url: '/api/chat/chatTest',
         data: {
           history,
-          prompt: messages[messages.length - 2].content,
+          prompt: chatList[chatList.length - 2].value,
           modules,
           variables,
           appId,

View File

@@ -63,6 +63,7 @@ export const dispatchChatCompletion = async (props: Record<string, any>): Promis
   }
   const { filterQuoteQA, quotePrompt } = filterQuote({
+    history,
     quoteQA,
     model: modelConstantsData
   });
@@ -181,23 +182,32 @@ export const dispatchChatCompletion = async (props: Record<string, any>): Promis
 };
 function filterQuote({
+  history = [],
   quoteQA = [],
   model
 }: {
+  history: ChatProps['history'];
   quoteQA: ChatProps['quoteQA'];
   model: ChatModelItemType;
 }) {
+  // concat history quote
+  const historyQuote =
+    history[history.length - 1]?.responseData
+      ?.find((item) => item.moduleName === ChatModuleEnum.AIChat)
+      ?.quoteList?.filter((item) => !quoteQA.find((quote) => quote.id === item.id)) || [];
+  const concatQuote = quoteQA.concat(historyQuote.slice(0, 3));
   const sliceResult = modelToolMap.tokenSlice({
     model: model.model,
     maxToken: model.quoteMaxToken,
-    messages: quoteQA.map((item, i) => ({
+    messages: concatQuote.map((item, i) => ({
       obj: ChatRoleEnum.System,
       value: item.a ? `{instruction:${item.q},output:${item.a}}` : `{instruction:${item.q}}`
     }))
   });
   // slice filterSearch
-  const filterQuoteQA = quoteQA.slice(0, sliceResult.length);
+  const filterQuoteQA = concatQuote.slice(0, sliceResult.length);
   const quotePrompt =
     filterQuoteQA.length > 0
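
This hunk is the heart of the "history quote" feature: quotes cited by the most recent AI answer are deduplicated against the fresh search results, capped at three, appended, and only then cut down to the model's token budget. A self-contained sketch of that flow, with a stand-in tokenSlice that charges one token per character (the real modelToolMap.tokenSlice counts model tokens):

type QuoteItem = { id: string; q: string; a?: string };

// Stand-in for modelToolMap.tokenSlice: keep messages until the budget is spent.
function tokenSlice(messages: { value: string }[], maxToken: number) {
  const kept: { value: string }[] = [];
  let used = 0;
  for (const m of messages) {
    if (used + m.value.length > maxToken) break;
    used += m.value.length;
    kept.push(m);
  }
  return kept;
}

function filterQuote(quoteQA: QuoteItem[], lastAnswerQuotes: QuoteItem[], maxToken: number) {
  // Drop history quotes already present in the new search results, keep at most 3.
  const historyQuote = lastAnswerQuotes
    .filter((item) => !quoteQA.find((quote) => quote.id === item.id))
    .slice(0, 3);
  const concatQuote = quoteQA.concat(historyQuote);
  // Render each quote as an instruction/output pair, then trim to the token budget.
  const sliceResult = tokenSlice(
    concatQuote.map((item) => ({
      value: item.a ? `{instruction:${item.q},output:${item.a}}` : `{instruction:${item.q}}`
    })),
    maxToken
  );
  return concatQuote.slice(0, sliceResult.length);
}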

View File

@@ -9,7 +9,6 @@ import { PgTrainingTableName } from '@/constants/plugin';
 type KBSearchProps = {
   kbList: SelectedKbType;
-  history: ChatItemType[];
   similarity: number;
   limit: number;
   userChatInput: string;
@@ -22,13 +21,7 @@ export type KBSearchResponse = {
 };
 export async function dispatchKBSearch(props: Record<string, any>): Promise<KBSearchResponse> {
-  const {
-    kbList = [],
-    history = [],
-    similarity = 0.8,
-    limit = 5,
-    userChatInput
-  } = props as KBSearchProps;
+  const { kbList = [], similarity = 0.8, limit = 5, userChatInput } = props as KBSearchProps;
   if (kbList.length === 0) {
     return Promise.reject("You didn't choose the knowledge base");