Mirror of https://github.com/labring/FastGPT.git (synced 2025-07-22 12:20:34 +00:00)
perf: query extension and fix filtering of duplicate embedding results (#3833)
* perf: query extension and fix filtering of duplicate embedding results
* fix: extract node too long
* perf: ui
* perf: auto save chat when no chatId is provided
* fix: laf
* fix: member load
* feat: add completions non-stream error response
* feat: add completions non-stream error response
* update model provider
@@ -1,7 +1,7 @@
 import { connectionMongo, getMongoModel } from '../../common/mongo';
 const { Schema } = connectionMongo;
 import { ChatSchema as ChatType } from '@fastgpt/global/core/chat/type.d';
-import { ChatSourceMap } from '@fastgpt/global/core/chat/constants';
+import { ChatSourceEnum, ChatSourceMap } from '@fastgpt/global/core/chat/constants';
 import {
   TeamCollectionName,
   TeamMemberCollectionName

@@ -52,11 +52,10 @@ const ChatSchema = new Schema({
   },
   source: {
     type: String,
-    required: true
-  },
-  sourceName: {
-    type: String
+    required: true,
+    enum: Object.values(ChatSourceEnum)
   },
+  sourceName: String,
   shareId: {
     type: String
   },
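The schema change above constrains `source` to the values of `ChatSourceEnum` and simplifies `sourceName` to a plain optional string. Below is a minimal sketch of how Mongoose enforces such an enum constraint; the enum values are assumed for illustration, since the real ones live in `@fastgpt/global/core/chat/constants` and are not shown in this diff.

```ts
// Minimal sketch (not FastGPT code) of what the added `enum` constraint does.
// ChatSourceEnum values here are assumptions for illustration only.
import mongoose, { Schema } from 'mongoose';

enum ChatSourceEnum {
  online = 'online',
  share = 'share',
  api = 'api'
}

const DemoChatSchema = new Schema({
  source: {
    type: String,
    required: true,
    enum: Object.values(ChatSourceEnum) // rejects any value outside the enum
  },
  sourceName: String // plain optional string, matching the simplified field
});

const DemoChat = mongoose.model('DemoChat', DemoChatSchema);

// validateSync() runs schema validation without touching a database.
const ok = new DemoChat({ source: ChatSourceEnum.api });
console.log(ok.validateSync()); // undefined -> passes validation

const bad = new DemoChat({ source: 'not-a-source' });
console.log(bad.validateSync()?.errors.source?.message); // enum validation error
```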
@@ -1,6 +1,10 @@
 import type { AIChatItemType, UserChatItemType } from '@fastgpt/global/core/chat/type.d';
 import { MongoApp } from '../app/schema';
-import { ChatItemValueTypeEnum, ChatRoleEnum } from '@fastgpt/global/core/chat/constants';
+import {
+  ChatItemValueTypeEnum,
+  ChatRoleEnum,
+  ChatSourceEnum
+} from '@fastgpt/global/core/chat/constants';
 import { MongoChatItem } from './chatItemSchema';
 import { MongoChat } from './chatSchema';
 import { addLog } from '../../common/system/log';

@@ -22,8 +26,8 @@ type Props = {
   variables?: Record<string, any>;
   isUpdateUseTime: boolean;
   newTitle: string;
-  source: string;
-  sourceName: string;
+  source: `${ChatSourceEnum}`;
+  sourceName?: string;
   shareId?: string;
   outLinkUid?: string;
   content: [UserChatItemType & { dataId?: string }, AIChatItemType & { dataId?: string }];
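In the `Props` type, `source` moves from `string` to the template-literal type `${ChatSourceEnum}`, which narrows it to the union of the enum's string values while still accepting plain string literals. A short sketch, again with assumed enum values:

```ts
// Sketch of why `${ChatSourceEnum}` is narrower than string.
// Enum values are assumptions mirroring the Props change above.
enum ChatSourceEnum {
  online = 'online',
  share = 'share',
  api = 'api'
}

// `${ChatSourceEnum}` expands to the union 'online' | 'share' | 'api',
// so plain string literals compile as long as they match an enum value.
type Props = {
  source: `${ChatSourceEnum}`;
  sourceName?: string;
};

const ok: Props = { source: 'api' };          // compiles
// const bad: Props = { source: 'whatever' }; // error: not assignable
```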
@@ -383,6 +383,7 @@ export async function searchDatasetData(
     ).lean()
   ]);

+  const set = new Map<string, number>();
   const formatResult = results
     .map((item, index) => {
       const collection = collections.find((col) => String(col._id) === String(item.collectionId));

@@ -398,8 +399,6 @@ export async function searchDatasetData(
         return;
       }

-      const score = item?.score || 0;
-
       const result: SearchDataResponseItemType = {
         id: String(data._id),
         updateTime: data.updateTime,

@@ -409,12 +408,24 @@ export async function searchDatasetData(
         datasetId: String(data.datasetId),
         collectionId: String(data.collectionId),
         ...getCollectionSourceData(collection),
-        score: [{ type: SearchScoreTypeEnum.embedding, value: score, index }]
+        score: [{ type: SearchScoreTypeEnum.embedding, value: item?.score || 0, index }]
       };

       return result;
     })
-    .filter(Boolean) as SearchDataResponseItemType[];
+    .filter((item) => {
+      if (!item) return false;
+      if (set.has(item.id)) return false;
+      set.set(item.id, 1);
+      return true;
+    })
+    .map((item, index) => {
+      if (!item) return;
+      return {
+        ...item,
+        score: item.score.map((item) => ({ ...item, index }))
+      };
+    }) as SearchDataResponseItemType[];

   return {
     embeddingRecallResults: formatResult,
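This hunk is the "fix filter same embedding result" part of the commit: a `Map` tracks ids already seen, duplicates and empty slots are dropped, and the embedding score `index` is renumbered so ranks stay contiguous after filtering. A standalone sketch of the same chain, using a simplified item shape rather than the real `SearchDataResponseItemType`:

```ts
// Standalone sketch of the dedup + re-index step above (simplified item shape).
type Item = { id: string; score: { type: string; value: number; index: number }[] };

const raw: (Item | undefined)[] = [
  { id: 'a', score: [{ type: 'embedding', value: 0.91, index: 0 }] },
  { id: 'a', score: [{ type: 'embedding', value: 0.9, index: 1 }] }, // duplicate id
  undefined, // a row whose data could not be resolved
  { id: 'b', score: [{ type: 'embedding', value: 0.82, index: 3 }] }
];

const seen = new Map<string, number>();
const deduped = raw
  .filter((item): item is Item => {
    if (!item) return false; // drop empty slots
    if (seen.has(item.id)) return false; // drop repeated ids, keeping the first (highest-ranked) hit
    seen.set(item.id, 1);
    return true;
  })
  .map((item, index) => ({
    ...item,
    // renumber the score index so ranks stay contiguous after filtering
    score: item.score.map((s) => ({ ...s, index }))
  }));

console.log(deduped.map((d) => ({ id: d.id, index: d.score[0].index })));
// -> [ { id: 'a', index: 0 }, { id: 'b', index: 1 } ]
```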
@@ -717,11 +728,12 @@ export const defaultSearchDatasetData = async ({
     ? getLLMModel(datasetSearchExtensionModel)
     : undefined;

-  const { concatQueries, rewriteQuery, aiExtensionResult } = await datasetSearchQueryExtension({
-    query,
-    extensionModel,
-    extensionBg: datasetSearchExtensionBg
-  });
+  const { concatQueries, extensionQueries, rewriteQuery, aiExtensionResult } =
+    await datasetSearchQueryExtension({
+      query,
+      extensionModel,
+      extensionBg: datasetSearchExtensionBg
+    });

   const result = await searchDatasetData({
     ...props,

@@ -736,7 +748,7 @@ export const defaultSearchDatasetData = async ({
         model: aiExtensionResult.model,
         inputTokens: aiExtensionResult.inputTokens,
         outputTokens: aiExtensionResult.outputTokens,
-        query: concatQueries.join('\n')
+        query: extensionQueries.join('\n')
       }
     : undefined
   };
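With `extensionQueries` now returned separately, the usage record's `query` field switches from the full concatenated list to only the queries produced by the extension model. A tiny sketch with assumed values:

```ts
// Assumed example values, only to show what the recorded `query` string contains.
const query = 'How do I reset my password?';
const extensionQueries = ['password reset steps', 'recover account access'];
const concatQueries = [query, ...extensionQueries];

// Before: the original query plus the extension queries were recorded together.
console.log(concatQueries.join('\n'));
// After: only the queries generated by the extension model are recorded.
console.log(extensionQueries.join('\n'));
```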
@@ -72,12 +72,15 @@ Human: ${query}
     if (result.extensionQueries?.length === 0) return;
     return result;
   })();

+  const extensionQueries = filterSamQuery(aiExtensionResult?.extensionQueries || []);
+
   if (aiExtensionResult) {
-    queries = filterSamQuery(queries.concat(aiExtensionResult.extensionQueries));
+    queries = filterSamQuery(queries.concat(extensionQueries));
     rewriteQuery = queries.join('\n');
   }

   return {
+    extensionQueries,
     concatQueries: queries,
     rewriteQuery,
     aiExtensionResult
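Here the extension queries are deduplicated with `filterSamQuery` before being merged into `queries`, and they are also returned on their own so callers such as `defaultSearchDatasetData` above can report them. `filterSamQuery`'s implementation is not part of this diff; the hypothetical sketch below only illustrates the kind of "drop duplicate queries" behavior its name suggests:

```ts
// Hypothetical stand-in for filterSamQuery (the real helper is not shown in this diff):
// trim each query and drop duplicates and empty strings.
const filterSamQuery = (queries: string[]): string[] =>
  Array.from(new Set(queries.map((q) => q.trim()))).filter(Boolean);

const queries = ['reset password', 'reset password ', 'change email'];
console.log(filterSamQuery(queries)); // ['reset password', 'change email']
```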