Mirror of https://github.com/labring/FastGPT.git (synced 2025-07-24 13:53:50 +00:00)

4.8.6 fix (#1963)

* feat: log store
* fix: full text search match query
* perf: mongo schema import, avoid duplicate import
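Most of the diff below is the same refactor applied to several dataset schema files: the `models[name] || model(name, schema)` export pattern is replaced by a shared `getMongoModel` helper, so a schema that is imported from multiple places is registered (and its indexes synced) only once. A minimal sketch of such a helper is shown here for orientation; the real helper lives in the shared common/mongo module and its body is not part of this diff, so the implementation below is an assumption.

import mongoose, { type Model, type Schema } from 'mongoose';

// Sketch of a getMongoModel-style helper (assumed implementation, not FastGPT's exact code).
export const getMongoModel = <T>(name: string, schema: Schema): Model<T> => {
  // Reuse the already-registered model so duplicate imports do not
  // re-register the schema or throw OverwriteModelError.
  if (mongoose.models[name]) return mongoose.models[name] as Model<T>;

  const registered = mongoose.model<T>(name, schema);

  // Index syncing is centralized here instead of being repeated in every schema file.
  registered.syncIndexes({ background: true }).catch((err) => console.log(err));

  return registered;
};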
@@ -1,4 +1,4 @@
-import { connectionMongo, type Model } from '../../../common/mongo';
+import { connectionMongo, getMongoModel, type Model } from '../../../common/mongo';
 const { Schema, model, models } = connectionMongo;
 import { DatasetCollectionSchemaType } from '@fastgpt/global/core/dataset/type.d';
 import { TrainingTypeMap, DatasetCollectionTypeMap } from '@fastgpt/global/core/dataset/constants';
@@ -94,9 +94,6 @@ const DatasetCollectionSchema = new Schema({
   }
 });
 
-export const MongoDatasetCollection: Model<DatasetCollectionSchemaType> =
-  models[DatasetColCollectionName] || model(DatasetColCollectionName, DatasetCollectionSchema);
-
 try {
   // auth file
   DatasetCollectionSchema.index({ teamId: 1, fileId: 1 });
@@ -111,8 +108,11 @@ try {
 
   // get forbid
   // DatasetCollectionSchema.index({ teamId: 1, datasetId: 1, forbid: 1 });
-
-  MongoDatasetCollection.syncIndexes({ background: true });
 } catch (error) {
   console.log(error);
 }
+
+export const MongoDatasetCollection = getMongoModel<DatasetCollectionSchemaType>(
+  DatasetColCollectionName,
+  DatasetCollectionSchema
+);

@@ -1,4 +1,4 @@
-import { connectionMongo, type Model } from '../../../common/mongo';
+import { connectionMongo, getMongoModel, type Model } from '../../../common/mongo';
 const { Schema, model, models } = connectionMongo;
 import { DatasetDataSchemaType } from '@fastgpt/global/core/dataset/type.d';
 import {
@@ -77,27 +77,23 @@ const DatasetDataSchema = new Schema({
   rebuilding: Boolean
 });
 
-export const MongoDatasetData: Model<DatasetDataSchemaType> =
-  models[DatasetDataCollectionName] || model(DatasetDataCollectionName, DatasetDataSchema);
+// list collection and count data; list data; delete collection(relate data)
+DatasetDataSchema.index({
+  teamId: 1,
+  datasetId: 1,
+  collectionId: 1,
+  chunkIndex: 1,
+  updateTime: -1
+});
+// full text index
+DatasetDataSchema.index({ teamId: 1, datasetId: 1, fullTextToken: 'text' });
+// Recall vectors after data matching
+DatasetDataSchema.index({ teamId: 1, datasetId: 1, collectionId: 1, 'indexes.dataId': 1 });
+DatasetDataSchema.index({ updateTime: 1 });
+// rebuild data
+DatasetDataSchema.index({ rebuilding: 1, teamId: 1, datasetId: 1 });
 
-try {
-  // list collection and count data; list data; delete collection(relate data)
-  DatasetDataSchema.index({
-    teamId: 1,
-    datasetId: 1,
-    collectionId: 1,
-    chunkIndex: 1,
-    updateTime: -1
-  });
-  // full text index
-  DatasetDataSchema.index({ teamId: 1, datasetId: 1, fullTextToken: 'text' });
-  // Recall vectors after data matching
-  DatasetDataSchema.index({ teamId: 1, datasetId: 1, collectionId: 1, 'indexes.dataId': 1 });
-  DatasetDataSchema.index({ updateTime: 1 });
-  // rebuild data
-  DatasetDataSchema.index({ rebuilding: 1, teamId: 1, datasetId: 1 });
-
-  MongoDatasetData.syncIndexes({ background: true });
-} catch (error) {
-  console.log(error);
-}
+export const MongoDatasetData = getMongoModel<DatasetDataSchemaType>(
+  DatasetDataCollectionName,
+  DatasetDataSchema
+);
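The `fullTextToken` text index defined above is what the "full text search match query" fix exercises. Below is a minimal sketch of a query against that index, assuming a single dataset plus an illustrative function name and projection; the real recall logic lives in the search controller and is not shown in this diff.

import { MongoDatasetData } from './schema'; // path assumed for illustration

// Sketch: recall dataset data via the compound text index
// { teamId: 1, datasetId: 1, fullTextToken: 'text' }.
export async function fullTextRecall(teamId: string, datasetId: string, searchText: string, limit = 60) {
  return MongoDatasetData.find(
    {
      teamId,
      datasetId,
      $text: { $search: searchText }
    },
    {
      score: { $meta: 'textScore' }, // relevance score from the text index
      collectionId: 1,
      chunkIndex: 1,
      updateTime: 1
    }
  )
    .sort({ score: { $meta: 'textScore' } })
    .limit(limit)
    .lean();
}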
@@ -1,4 +1,4 @@
-import { connectionMongo, type Model } from '../../common/mongo';
+import { connectionMongo, getMongoModel, type Model } from '../../common/mongo';
 const { Schema, model, models } = connectionMongo;
 import { DatasetSchemaType } from '@fastgpt/global/core/dataset/type.d';
 import {
@@ -11,7 +11,6 @@ import {
   TeamCollectionName,
   TeamMemberCollectionName
 } from '@fastgpt/global/support/user/team/constant';
-import { PermissionTypeEnum, PermissionTypeMap } from '@fastgpt/global/support/permission/constant';
 import { DatasetDefaultPermissionVal } from '@fastgpt/global/support/permission/dataset/constant';
 
 export const DatasetCollectionName = 'datasets';
@@ -99,6 +98,4 @@ try {
   console.log(error);
 }
 
-export const MongoDataset: Model<DatasetSchemaType> =
-  models[DatasetCollectionName] || model(DatasetCollectionName, DatasetSchema);
-MongoDataset.syncIndexes();
+export const MongoDataset = getMongoModel<DatasetSchemaType>(DatasetCollectionName, DatasetSchema);

@@ -212,7 +212,7 @@ export async function searchDatasetData(props: SearchDatasetDataProps) {
         {
           $match: {
             $expr: { $eq: ['$_id', '$$collectionId'] },
-            forbid: { $eq: false } // filter directly in the $lookup stage
+            forbid: { $eq: true } // match forbidden data
          }
         },
         {
@@ -226,7 +226,7 @@ export async function searchDatasetData(props: SearchDatasetDataProps) {
         },
         {
           $match: {
-            collection: { $ne: [] }
+            collection: { $eq: [] } // no data whose collection has forbid=true
           }
         },
         {
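The two hunks above invert the forbid filter in `searchDatasetData`: the `$lookup` now matches only collections with `forbid: true`, and the following `$match` keeps rows whose lookup result is empty, so data whose collection has no `forbid` flag set is no longer dropped. Below is a condensed sketch of that pipeline shape; the surrounding stages are omitted, and the function name, base match, and `from` collection name are assumptions for illustration.

import { MongoDatasetData } from '../data/schema'; // path assumed for illustration

// Sketch of the forbid-filter shape after this change (surrounding stages omitted).
export async function excludeForbiddenData(baseMatch: Record<string, any>) {
  return MongoDatasetData.aggregate([
    { $match: baseMatch },
    {
      $lookup: {
        from: 'dataset_collections', // assumed physical collection name
        let: { collectionId: '$collectionId' },
        pipeline: [
          {
            $match: {
              $expr: { $eq: ['$_id', '$$collectionId'] },
              forbid: { $eq: true } // only a forbidden parent produces a lookup hit
            }
          },
          { $project: { _id: 1 } }
        ],
        as: 'collection'
      }
    },
    // keep only rows whose parent collection is not forbidden (lookup returned nothing)
    { $match: { collection: { $eq: [] } } }
  ]);
}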
@@ -1,5 +1,5 @@
 /* Model dataset (knowledge base) */
-import { connectionMongo, type Model } from '../../../common/mongo';
+import { connectionMongo, getMongoModel, type Model } from '../../../common/mongo';
 const { Schema, model, models } = connectionMongo;
 import { DatasetTrainingSchemaType } from '@fastgpt/global/core/dataset/type';
 import { TrainingTypeMap } from '@fastgpt/global/core/dataset/constants';
@@ -103,7 +103,7 @@ try {
   console.log(error);
 }
 
-export const MongoDatasetTraining: Model<DatasetTrainingSchemaType> =
-  models[DatasetTrainingCollectionName] || model(DatasetTrainingCollectionName, TrainingDataSchema);
-
-MongoDatasetTraining.syncIndexes();
+export const MongoDatasetTraining = getMongoModel<DatasetTrainingSchemaType>(
+  DatasetTrainingCollectionName,
+  TrainingDataSchema
+);
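With every schema module now exporting through `getMongoModel`, importing the same schema from several entry points resolves to one registered model, so middleware and index syncing are attached only once. A trivial usage sketch (import path and query are illustrative, not from this diff):

import { MongoDatasetTraining } from './trainingSchema'; // path assumed for illustration

// Repeated imports of the schema module resolve to the same registered model.
export const countTrainingJobs = (teamId: string) =>
  MongoDatasetTraining.countDocuments({ teamId });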