mirror of
https://github.com/labring/FastGPT.git
synced 2025-07-21 11:43:56 +00:00

* feat: rewrite chat context (#3176) * feat: add app auto execute (#3115) * feat: add app auto execute * auto exec configuration * chatting animation * change icon * fix * fix * fix link * feat: add chat context to all chatbox * perf: loading ui --------- Co-authored-by: heheer <heheer@sealos.io> * app auto exec (#3179) * add chat records loaded state (#3184) * perf: chat store reset storage (#3186) * perf: chat store reset storage * perf: auto exec code * chore: workflow ui (#3175) * chore: workflow ui * fix * change icon color config * change popover to mymenu * 4.8.14 test (#3189) * update doc * fix: token check * perf: icon button * update doc * feat: share page support configuration whether to allow the original view (#3194) * update doc * perf: fix index (#3206) * perf: i18n * perf: Add service entry (#3226) * 4.8.14 test (#3228) * fix: ai log * fix: text splitter * fix: reference unselect & user form description & simple to advance (#3229) * fix: reference unselect & user form description & simple to advance * change abort position * perf * perf: code (#3232) * perf: code * update doc * fix: create btn permission (#3233) * update doc * fix: refresh chatbox listener * perf: check invalid reference * perf: check invalid reference * update doc * fix: ui props --------- Co-authored-by: heheer <heheer@sealos.io>
133 lines
2.8 KiB
TypeScript
133 lines
2.8 KiB
TypeScript
import { connectionMongo, getMongoModel, type Model } from '../../../common/mongo';
|
|
const { Schema, model, models } = connectionMongo;
|
|
import { DatasetCollectionSchemaType } from '@fastgpt/global/core/dataset/type.d';
|
|
import { TrainingTypeMap, DatasetCollectionTypeMap } from '@fastgpt/global/core/dataset/constants';
|
|
import { DatasetCollectionName } from '../schema';
|
|
import {
|
|
TeamCollectionName,
|
|
TeamMemberCollectionName
|
|
} from '@fastgpt/global/support/user/team/constant';
|
|
|
|
export const DatasetColCollectionName = 'dataset_collections';
|
|
|
|
const DatasetCollectionSchema = new Schema({
|
|
parentId: {
|
|
type: Schema.Types.ObjectId,
|
|
ref: DatasetColCollectionName,
|
|
default: null
|
|
},
|
|
teamId: {
|
|
type: Schema.Types.ObjectId,
|
|
ref: TeamCollectionName,
|
|
required: true
|
|
},
|
|
tmbId: {
|
|
type: Schema.Types.ObjectId,
|
|
ref: TeamMemberCollectionName,
|
|
required: true
|
|
},
|
|
datasetId: {
|
|
type: Schema.Types.ObjectId,
|
|
ref: DatasetCollectionName,
|
|
required: true
|
|
},
|
|
type: {
|
|
type: String,
|
|
enum: Object.keys(DatasetCollectionTypeMap),
|
|
required: true
|
|
},
|
|
name: {
|
|
type: String,
|
|
required: true
|
|
},
|
|
createTime: {
|
|
type: Date,
|
|
default: () => new Date()
|
|
},
|
|
updateTime: {
|
|
type: Date,
|
|
default: () => new Date()
|
|
},
|
|
forbid: {
|
|
type: Boolean,
|
|
default: false
|
|
},
|
|
|
|
// chunk filed
|
|
trainingType: {
|
|
type: String,
|
|
enum: Object.keys(TrainingTypeMap)
|
|
},
|
|
chunkSize: {
|
|
type: Number,
|
|
required: true
|
|
},
|
|
chunkSplitter: {
|
|
type: String
|
|
},
|
|
qaPrompt: {
|
|
type: String
|
|
},
|
|
ocrParse: Boolean,
|
|
|
|
tags: {
|
|
type: [String],
|
|
default: []
|
|
},
|
|
|
|
// local file collection
|
|
fileId: {
|
|
type: Schema.Types.ObjectId,
|
|
ref: 'dataset.files'
|
|
},
|
|
// web link collection
|
|
rawLink: String,
|
|
// external collection
|
|
externalFileId: String,
|
|
|
|
// metadata
|
|
rawTextLength: Number,
|
|
hashRawText: String,
|
|
externalFileUrl: String, // external import url
|
|
metadata: {
|
|
type: Object,
|
|
default: {}
|
|
}
|
|
});
|
|
|
|
try {
|
|
// auth file
|
|
DatasetCollectionSchema.index({ teamId: 1, fileId: 1 });
|
|
|
|
// list collection; deep find collections
|
|
DatasetCollectionSchema.index({
|
|
teamId: 1,
|
|
datasetId: 1,
|
|
parentId: 1,
|
|
updateTime: -1
|
|
});
|
|
|
|
// Tag filter
|
|
DatasetCollectionSchema.index({ teamId: 1, datasetId: 1, tags: 1 });
|
|
// create time filter
|
|
DatasetCollectionSchema.index({ teamId: 1, datasetId: 1, createTime: 1 });
|
|
|
|
// Get collection by external file id
|
|
DatasetCollectionSchema.index(
|
|
{ datasetId: 1, externalFileId: 1 },
|
|
{
|
|
unique: true,
|
|
partialFilterExpression: {
|
|
externalFileId: { $exists: true }
|
|
}
|
|
}
|
|
);
|
|
} catch (error) {
|
|
console.log(error);
|
|
}
|
|
|
|
export const MongoDatasetCollection = getMongoModel<DatasetCollectionSchemaType>(
|
|
DatasetColCollectionName,
|
|
DatasetCollectionSchema
|
|
);
|