/*
 * Mirror of https://github.com/labring/FastGPT.git
 * Synced 2026-04-26 02:07:28 +08:00 — commit 2e18f1ebc2
 *
 * Upstream changelog for this revision (Next.js 15 migration):
 * - next 15; lockfile update
 * - feat: rename .d.ts to .ts for Next 15 compatibility — renamed 104 .d.ts files to .ts
 *   (Next 15 no longer supports .d.ts in src), removed 5 redundant .d.ts files with .ts
 *   counterparts, updated all import paths (dropped the .d suffix in 100 imports), updated
 *   tsconfig.json include patterns across all packages, added pnpm overrides to unify
 *   react@18.3.1 across the monorepo, fixed a react version mismatch (packages/global and
 *   packages/service were resolving to react@19.1.1)
 * - fix: resolved 61 TypeScript errors from the .d.ts-to-.ts migration — fixed broken
 *   non-relative imports (e.g. 'support/user/team/type' → relative paths), removed dead
 *   imports referencing deleted modules, fixed duplicate identifiers (show_emptyChat,
 *   concatMd, TrainingModeEnum), added missing imports (BoxProps, GroupMemberRole,
 *   UsageSourceEnum, dashboard_evaluation), fixed generic type constraints (OutLinkEditType,
 *   createShareChat), replaced removed types with correct alternatives (ChatModelItemType →
 *   LLMModelItemType), deleted 5 dead code files with 0 references, added a global type
 *   declaration for countTrackQueue, fixed nullable type narrowing (sourceMember,
 *   ParentIdType, optional app fields)
 * - refactor: replaced an `as ClientSession` assertion with proper type narrowing via
 *   Omit & intersection
 * - fix: removed experimental.workerThreads to fix a DataCloneError in Next 15 static
 *   generation (Next 15 worker threads structuredClone the config object, which fails on
 *   the webpack function; workerThreads is not needed for a correct build)
 * - docs: updated document/content/docs/upgrading/4-14/4148.mdx
 * - misc: ts fixes, next config update, next update, dockerfile fix, comment fix
 *
 * Co-authored-by: Archer <c121914yu@gmail.com>
 * Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
 */
import { getMongoModel, Schema } from '../../common/mongo';
import { getLogger, LogCategories } from '../../common/logger';
import {
  ChunkSettingModeEnum,
  ChunkTriggerConfigTypeEnum,
  DataChunkSplitModeEnum,
  DatasetCollectionDataProcessModeEnum,
  DatasetTypeEnum,
  DatasetTypeMap,
  ParagraphChunkAIModeEnum
} from '@fastgpt/global/core/dataset/constants';
import {
  TeamCollectionName,
  TeamMemberCollectionName
} from '@fastgpt/global/support/user/team/constant';
import type { DatasetSchemaType } from '@fastgpt/global/core/dataset/type';
// MongoDB collection name for datasets. Also used below as the `ref` target
// for `parentId`, making the dataset/folder tree self-referencing.
export const DatasetCollectionName = 'datasets';
// Field definitions describing how a dataset's source content is split into
// chunks. This is a plain object (not a mongoose Schema instance); it is
// embedded into DatasetSchema below under `chunkSettings`, and exported so it
// can presumably be reused by other schemas — confirm at call sites.
export const ChunkSettings = {
  // How source data is turned into training data (DatasetCollectionDataProcessModeEnum).
  trainingType: {
    type: String,
    enum: Object.values(DatasetCollectionDataProcessModeEnum)
  },

  // Trigger condition for chunking, plus its minimum-size threshold.
  chunkTriggerType: {
    type: String,
    enum: Object.values(ChunkTriggerConfigTypeEnum)
  },
  chunkTriggerMinSize: Number,

  dataEnhanceCollectionName: Boolean,

  // Optional index-enrichment flags.
  imageIndex: Boolean,
  autoIndexes: Boolean,
  indexPrefixTitle: Boolean,

  // Whether chunk parameters are automatic or user-customized (ChunkSettingModeEnum).
  chunkSettingMode: {
    type: String,
    enum: Object.values(ChunkSettingModeEnum)
  },
  // Strategy used to split data into chunks (DataChunkSplitModeEnum).
  chunkSplitMode: {
    type: String,
    enum: Object.values(DataChunkSplitModeEnum)
  },
  // Paragraph-level chunking configuration.
  paragraphChunkAIMode: {
    type: String,
    enum: Object.values(ParagraphChunkAIModeEnum)
  },
  paragraphChunkDeep: Number,
  paragraphChunkMinSize: Number,
  chunkSize: Number,
  chunkSplitter: String,

  indexSize: Number,
  // Custom prompt used when trainingType involves QA generation — presumably; verify usage.
  qaPrompt: String
};
const DatasetSchema = new Schema({
  // Parent dataset (folder) — self-referencing tree; null means top level.
  parentId: {
    type: Schema.Types.ObjectId,
    ref: DatasetCollectionName,
    default: null
  },
  // abandoned: legacy owner reference
  userId: {
    type: Schema.Types.ObjectId,
    ref: 'user'
  },
  // Owning team and team member — the effective ownership fields.
  teamId: {
    type: Schema.Types.ObjectId,
    ref: TeamCollectionName,
    required: true
  },
  tmbId: {
    type: Schema.Types.ObjectId,
    ref: TeamMemberCollectionName,
    required: true
  },
  // Dataset kind, restricted to the keys of DatasetTypeMap.
  type: {
    type: String,
    enum: Object.keys(DatasetTypeMap),
    required: true,
    default: DatasetTypeEnum.dataset
  },
  avatar: {
    type: String,
    default: '/icon/logo.svg'
  },
  name: {
    type: String,
    required: true
  },
  // Function default so each new document gets its own timestamp at creation.
  updateTime: {
    type: Date,
    default: () => new Date()
  },
  // Embedding model id used to vectorize this dataset's content.
  vectorModel: {
    type: String,
    required: true,
    default: 'text-embedding-3-small'
  },
  // LLM id used for dataset processing (e.g. QA generation) — presumably; confirm against callers.
  agentModel: {
    type: String,
    required: true,
    default: 'gpt-4o-mini'
  },
  // Optional vision-language model id.
  vlmModel: String,
  intro: {
    type: String,
    default: ''
  },
  // Configuration for website-type datasets: root URL plus a CSS selector
  // that scopes which part of each page is ingested (defaults to the whole body).
  websiteConfig: {
    type: {
      url: {
        type: String,
        required: true
      },
      selector: {
        type: String,
        default: 'body'
      }
    }
  },
  // Chunking configuration; field shapes defined by ChunkSettings in this file.
  chunkSettings: {
    type: ChunkSettings
  },
  // Whether permissions are inherited from the parent folder.
  inheritPermission: {
    type: Boolean,
    default: true
  },

  apiDatasetServer: Object,

  // Soft-delete marker field
  deleteTime: {
    type: Date,
    default: null // null = not deleted; a value records when the dataset was deleted
  },

  // abandoned
  autoSync: Boolean,
  externalReadUrl: String,
  defaultPermission: Number,
  apiServer: Object,
  feishuServer: Object,
  yuqueServer: Object
});
try {
|
|
DatasetSchema.index({ teamId: 1 });
|
|
DatasetSchema.index({ type: 1 }); // Admin count
|
|
DatasetSchema.index({ deleteTime: 1 }); // 添加软删除字段索引
|
|
} catch (error) {
|
|
const logger = getLogger(LogCategories.INFRA.MONGO);
|
|
logger.error('Failed to build dataset indexes', { error });
|
|
}
|
|
|
|
// Typed mongoose model for the 'datasets' collection.
export const MongoDataset = getMongoModel<DatasetSchemaType>(DatasetCollectionName, DatasetSchema);