perf: quote response
@@ -3,6 +3,7 @@ import { jsonRes } from '@/service/response';
 import { connectToDatabase, Chat } from '@/service/mongo';
 import { authUser } from '@/service/utils/auth';
 import { Types } from 'mongoose';
+import { rawSearchKey } from '@/constants/chat';
 
 export default async function handler(req: NextApiRequest, res: NextApiResponse) {
   try {
@@ -35,13 +36,13 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
       },
       {
         $project: {
-          quote: '$content.quote'
+          [rawSearchKey]: `$content.${rawSearchKey}`
         }
       }
     ]);
 
     jsonRes(res, {
-      data: history[0]?.quote || []
+      data: history[0]?.[rawSearchKey] || []
     });
   } catch (err) {
     jsonRes(res, {
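The two hunks above replace the hard-coded `quote` field with the shared `rawSearchKey` constant, used as a computed property name in both the `$project` stage and the response lookup. A minimal sketch of that pattern, assuming `rawSearchKey` resolves to the string 'rawSearch' (its actual definition lives in `@/constants/chat` and is not part of this diff):

// Sketch only: the real constant is exported from @/constants/chat; 'rawSearch'
// is an assumption consistent with the literal used elsewhere in this commit.
const rawSearchKey = 'rawSearch' as const;

// Computed property names keep the aggregation stage and its consumer in sync:
const projectStage = {
  $project: {
    [rawSearchKey]: `$content.${rawSearchKey}` // -> { rawSearch: '$content.rawSearch' }
  }
};

// The same constant indexes the aggregation result, so a later rename
// only touches the constants file.
type HistoryRow = { [key: string]: unknown };
function pickQuote(history: HistoryRow[]) {
  return (history[0]?.[rawSearchKey] as unknown[]) ?? [];
}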
@@ -11,7 +11,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
     contentId,
     quoteId,
     sourceText = ''
-  } = req.query as {
+  } = req.body as {
     historyId: string;
     contentId: string;
     quoteId: string;
@@ -33,7 +33,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
       },
       {
         $set: {
-          'content.$.quote.$[quoteElem].source': sourceText
+          'content.$.rawSearch.$[quoteElem].source': sourceText
         }
       },
       {
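The first hunk above moves the parameters from `req.query` to `req.body`; the second writes the edited source text through the positional `$` operator plus a `$[quoteElem]` array filter. A hedged sketch of what such an update can look like with Mongoose; the model name, the id fields, and the `quoteElem.id` filter are assumptions for illustration, not code from this commit:

// Illustrative sketch of the update shape used above, not FastGPT's actual code.
import { Types, Model } from 'mongoose';

async function updateQuoteSource(
  Chat: Model<any>,
  historyId: string,
  contentId: string,
  quoteId: string,
  sourceText: string
) {
  await Chat.updateOne(
    // `content.$` resolves to the first content element matched by this filter...
    { _id: new Types.ObjectId(historyId), 'content._id': new Types.ObjectId(contentId) },
    {
      $set: {
        // ...and `$[quoteElem]` targets the rawSearch entries selected by the arrayFilter below.
        'content.$.rawSearch.$[quoteElem].source': sourceText
      }
    },
    {
      // Assumed filter: quote items carry an `id` field, as QuoteItemType suggests.
      arrayFilters: [{ 'quoteElem.id': quoteId }]
    }
  );
}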
@@ -9,6 +9,7 @@ import mongoose from 'mongoose';
 import type { AppSchema, ChatSchema } from '@/types/mongoSchema';
 import { FlowModuleTypeEnum } from '@/constants/flow';
 import { SystemInputEnum } from '@/constants/app';
+import { quoteLenKey, rawSearchKey } from '@/constants/chat';
 
 /* Initialize my chat window; authentication required */
 export default async function handler(req: NextApiRequest, res: NextApiResponse) {
@@ -82,8 +83,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
             _id: '$content._id',
             obj: '$content.obj',
             value: '$content.value',
-            systemPrompt: '$content.systemPrompt',
-            quoteLen: { $size: { $ifNull: ['$content.quote', []] } }
+            [quoteLenKey]: { $size: { $ifNull: [`$content.${rawSearchKey}`, []] } }
           }
         }
       ]);
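The projection above counts the stored search results with `$size`, guarded by `$ifNull`, again under the shared key constants. A small sketch with assumed values for both constants:

// Sketch of the quote-length projection; the two constant values are assumptions.
const rawSearchKey = 'rawSearch';
const quoteLenKey = 'quoteLen';

const projectStage = {
  $project: {
    _id: '$content._id',
    obj: '$content.obj',
    value: '$content.value',
    // $size errors on missing or non-array input, so $ifNull substitutes []
    // for content items that have no stored search results.
    [quoteLenKey]: { $size: { $ifNull: [`$content.${rawSearchKey}`, []] } }
  }
};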
@@ -128,7 +128,6 @@ export async function chatCompletion({
 
   const adaptMessages = adaptChatItem_openAI({ messages: filterMessages, reserveId: false });
   const chatAPI = getOpenAIApi();
-  console.log(adaptMessages);
 
   /* count response max token */
   const promptsToken = modelToolMap.countTokens({
@@ -3,13 +3,14 @@ import { jsonRes } from '@/service/response';
 import { PgClient } from '@/service/pg';
 import { withNextCors } from '@/service/utils/tools';
 import type { ChatItemType } from '@/types/chat';
-import { ChatRoleEnum } from '@/constants/chat';
+import { ChatRoleEnum, rawSearchKey } from '@/constants/chat';
 import { modelToolMap } from '@/utils/plugin';
 import { getVector } from '../../plugin/vector';
 import { countModelPrice, pushTaskBillListItem } from '@/service/events/pushBill';
 import { getModel } from '@/service/utils/data';
 
 export type QuoteItemType = {
+  kb_id: string;
   id: string;
   q: string;
   a: string;
@@ -26,7 +27,7 @@ type Props = {
   billId?: string;
 };
 type Response = {
-  rawSearch: QuoteItemType[];
+  [rawSearchKey]: QuoteItemType[];
   isEmpty?: boolean;
   quotePrompt?: string;
 };
@@ -85,7 +86,7 @@ export async function kbSearch({
     PgClient.query(
       `BEGIN;
     SET LOCAL ivfflat.probes = ${global.systemEnv.pgIvfflatProbe || 10};
-    select id,q,a,source from modelData where kb_id IN (${kb_ids
+    select kb_id,id,q,a,source from modelData where kb_id IN (${kb_ids
       .map((item) => `'${item}'`)
      .join(',')}) AND vector <#> '[${vectors[0]}]' < -${similarity} order by vector <#> '[${
      vectors[0]
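The last hunk above adds `kb_id` to the selected columns so each returned quote can be traced back to its knowledge base, matching the new field on QuoteItemType. A minimal sketch of the same query shape using the `pg` driver directly; only the table and column names come from the diff, while the pool setup and the limit are assumptions:

// Sketch, not FastGPT's PgClient implementation.
import { Pool } from 'pg';

const pool = new Pool(); // connection settings taken from PG* environment variables

export async function searchQuotes(kbIds: string[], vector: number[], similarity: number) {
  // pgvector's `<#>` returns the negative inner product, so
  // `vector <#> query < -similarity` keeps rows whose inner product exceeds `similarity`,
  // and ordering by the same expression ranks the closest rows first.
  const idList = kbIds.map((id) => `'${id}'`).join(',');
  const { rows } = await pool.query(
    `SELECT kb_id, id, q, a, source
       FROM modelData
      WHERE kb_id IN (${idList})
        AND vector <#> '[${vector}]' < -${similarity}
      ORDER BY vector <#> '[${vector}]'
      LIMIT 20;` // limit chosen for illustration; the hunk ends before the real one
  );
  return rows as { kb_id: string; id: string; q: string; a: string; source: string }[];
}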
@@ -42,11 +42,11 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
 
 export function gpt_chatItemTokenSlice({
   messages,
-  model,
+  model = 'gpt-3.5-turbo',
   maxToken
 }: {
   messages: ChatItemType[];
-  model: ModelType;
+  model?: ModelType;
   maxToken: number;
 }) {
   let result: ChatItemType[] = [];
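The hunk above gives `model` a 'gpt-3.5-turbo' default, which is why the type can mark it optional. A rough sketch of the slicing idea, using a crude 4-characters-per-token estimate as a stand-in for the project's real tokenizer:

// Sketch of the behaviour implied above; ChatItemType is reduced to what is needed here.
type ChatItemType = { obj: string; value: string };

// Assumed estimator: roughly 4 characters per token.
const countTokens = (items: ChatItemType[]) =>
  Math.ceil(items.reduce((sum, item) => sum + item.value.length, 0) / 4);

export function chatItemTokenSlice({
  messages,
  model = 'gpt-3.5-turbo', // default makes the parameter optional for callers
  maxToken
}: {
  messages: ChatItemType[];
  model?: string;
  maxToken: number;
}) {
  // The real implementation would pick a tokenizer per `model`; the estimate above ignores it.
  const result: ChatItemType[] = [];
  for (const item of messages) {
    // Keep appending messages until the estimated token budget is exhausted.
    if (countTokens([...result, item]) > maxToken) break;
    result.push(item);
  }
  return result;
}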
@@ -94,6 +94,8 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
     getChatHistory({ historyId, userId })
   ]);
 
+  const isOwner = !shareId && userId === String(app.userId);
+
   const prompts = history.concat(gptMessage2ChatType(messages));
   if (prompts[prompts.length - 1].obj === 'AI') {
     prompts.pop();
@@ -143,24 +145,30 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
           _id: messages[messages.length - 1]._id,
           obj: ChatRoleEnum.AI,
           value: answerText,
-          responseData
+          ...responseData
         }
       ],
       userId
     });
   }
 
   console.log(`finish time: ${(Date.now() - startTime) / 100}s`);
 
   if (stream) {
     sseResponse({
       res,
       event: sseResponseEventEnum.answer,
       data: '[DONE]'
     });
-    sseResponse({
-      res,
-      event: sseResponseEventEnum.appStreamResponse,
-      data: JSON.stringify(responseData)
-    });
+
+    if (isOwner) {
+      sseResponse({
+        res,
+        event: sseResponseEventEnum.appStreamResponse,
+        data: JSON.stringify(responseData)
+      });
+    }
 
     res.end();
   } else {
     res.json({
@@ -189,7 +197,6 @@ export default withNextCors(async function handler(req: NextApiRequest, res: Nex
     delTaskBill(billId);
 
     if (stream) {
-      res.status(500);
       sseErrRes(res, err);
       res.end();
     } else {
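The handler now computes `isOwner` once, spreads `responseData` into the saved chat item, and only streams the detailed `appStreamResponse` event to the app's owner, while every client still receives the `[DONE]` answer marker. A small sketch of that gating with an assumed `sseResponse` helper that writes standard `event:`/`data:` frames; the event names follow the diff, the helper itself is an assumption:

import type { ServerResponse } from 'http';

function sseResponse({ res, event, data }: { res: ServerResponse; event: string; data: string }) {
  // One SSE frame: a named event plus a data line, terminated by a blank line.
  res.write(`event: ${event}\n`);
  res.write(`data: ${data}\n\n`);
}

function finishStream(res: ServerResponse, isOwner: boolean, responseData: unknown) {
  // Every client gets the end-of-answer marker...
  sseResponse({ res, event: 'answer', data: '[DONE]' });

  // ...but the per-module response details are only streamed to the app owner.
  if (isOwner) {
    sseResponse({ res, event: 'appStreamResponse', data: JSON.stringify(responseData) });
  }

  res.end();
}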
@@ -29,7 +29,7 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse<
   const where: any = [['user_id', userId], 'AND', ['id', dataId]];
 
   const searchRes = await PgClient.select<KbDataItemType>('modelData', {
-    fields: ['id', 'q', 'a', 'source'],
+    fields: ['kb_id', 'id', 'q', 'a', 'source'],
     where,
     limit: 1
   });
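The final hunk adds `kb_id` to the field list of the detail query as well, so the record's knowledge base id reaches the client. A hedged sketch of how a field-list select like this might translate to SQL; `PgClient`'s real implementation is not shown in the diff, so the helper below is only an assumption about its shape:

import { Pool } from 'pg';

const pool = new Pool();

// Simplified stand-in for a field-list select: builds "SELECT <fields> ... LIMIT 1"
// from a flat equality filter and returns the first row, if any.
async function selectOne<T>(
  table: string,
  fields: string[],
  where: Record<string, string | number>
) {
  const keys = Object.keys(where);
  const conditions = keys.map((key, i) => `${key} = $${i + 1}`).join(' AND ');
  const sql = `SELECT ${fields.join(', ')} FROM ${table} WHERE ${conditions} LIMIT 1;`;
  const { rows } = await pool.query(sql, Object.values(where));
  return rows[0] as T | undefined;
}

// Usage mirroring the diff (types and ids assumed):
// const data = await selectOne('modelData', ['kb_id', 'id', 'q', 'a', 'source'],
//   { user_id: userId, id: dataId });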