Mirror of https://github.com/labring/FastGPT.git (synced 2025-07-22 20:37:48 +00:00)
4.6.4 (#588)
@@ -54,7 +54,14 @@ const ChatItemSchema = new Schema({
     type: String,
     default: ''
   },
-  userFeedback: {
+  userGoodFeedback: {
     type: String
   },
+  userFeedback: String,
+  userBadFeedback: {
+    type: String
+  },
+  robotBadFeedback: {
+    type: String
+  },
   adminFeedback: {
@@ -77,7 +84,10 @@ try {
   ChatItemSchema.index({ userId: 1 });
   ChatItemSchema.index({ appId: 1 });
   ChatItemSchema.index({ chatId: 1 });
-  ChatItemSchema.index({ userFeedback: 1 });
+  ChatItemSchema.index({ userGoodFeedback: 1 });
+  ChatItemSchema.index({ userBadFeedback: 1 });
+  ChatItemSchema.index({ robotBadFeedback: 1 });
+  ChatItemSchema.index({ adminFeedback: 1 });
 } catch (error) {
   console.log(error);
 }
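The hunks above split user feedback into userGoodFeedback, userBadFeedback, and robotBadFeedback and give each feedback field its own index. As a hedged illustration only (not code from this commit), a query for chat items that received a bad user rating might look like this; the MongoChatItem model name and import path are assumptions:

// Hedged sketch, not part of this commit: fetch chat items that users rated badly.
// Assumes the schema above is exported as a model named MongoChatItem.
import { MongoChatItem } from './chatItemSchema';

export async function listUserBadFeedback(appId: string) {
  // Filters on the new userBadFeedback field; the field is indexed above.
  return MongoChatItem.find(
    { appId, userBadFeedback: { $exists: true } },
    'chatId value userBadFeedback'
  ).lean();
}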
@@ -6,6 +6,8 @@ import type { CreateDatasetCollectionParams } from '@fastgpt/global/core/dataset
 import { MongoDatasetCollection } from './schema';
 
 export async function createOneCollection({
+  teamId,
+  tmbId,
   name,
   parentId,
   datasetId,
@@ -14,8 +16,8 @@ export async function createOneCollection({
   chunkSize = 0,
   fileId,
   rawLink,
-  teamId,
-  tmbId,
   qaPrompt,
+  hashRawText,
+  metadata = {}
 }: CreateDatasetCollectionParams & { teamId: string; tmbId: string }) {
   const { _id } = await MongoDatasetCollection.create({
@@ -29,6 +31,8 @@ export async function createOneCollection({
     chunkSize,
     fileId,
     rawLink,
     qaPrompt,
+    hashRawText,
+    metadata
   });
 
@@ -71,3 +75,19 @@ export function createDefaultCollection({
     updateTime: new Date('2099')
   });
 }
+
+// check same collection
+export const getSameRawTextCollection = async ({
+  datasetId,
+  hashRawText
+}: {
+  datasetId: string;
+  hashRawText?: string;
+}) => {
+  const collection = await MongoDatasetCollection.findOne({
+    datasetId,
+    hashRawText
+  });
+
+  return collection;
+};
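getSameRawTextCollection pairs with the new hashRawText parameter on createOneCollection. A hedged usage sketch (not from this commit) of deduplicating by raw-text hash before creating a collection; the import path is an assumption:

// Hedged sketch, not part of this commit: skip creating a collection when the
// same dataset already holds identical raw text (matched by hashRawText).
import { createOneCollection, getSameRawTextCollection } from './controller'; // assumed path

export async function createCollectionDeduped(
  params: Parameters<typeof createOneCollection>[0]
) {
  const existing = await getSameRawTextCollection({
    datasetId: params.datasetId,
    hashRawText: params.hashRawText
  });
  if (existing) return existing; // reuse the collection with the same raw text

  return createOneCollection(params);
}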
@@ -72,6 +72,12 @@ const DatasetCollectionSchema = new Schema({
   rawLink: {
     type: String
   },
   qaPrompt: {
     type: String
   },
+  hashRawText: {
+    type: String
+  },
+  metadata: {
+    type: Object,
+    default: {}
@@ -82,6 +88,7 @@ try {
   DatasetCollectionSchema.index({ datasetId: 1 });
   DatasetCollectionSchema.index({ datasetId: 1, parentId: 1 });
   DatasetCollectionSchema.index({ updateTime: -1 });
+  DatasetCollectionSchema.index({ hashRawText: -1 });
 } catch (error) {
   console.log(error);
 }
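The collection schema gains hashRawText (indexed above) so that identical source text can be detected. The hashing helper itself does not appear in these hunks; the following is a hedged sketch only, assuming a hex-encoded SHA-256 digest of the raw text:

// Hedged sketch, not part of this commit: one plausible way to compute hashRawText.
import { createHash } from 'crypto';

export function hashRawText(rawText: string): string {
  // Identical raw text yields an identical digest, which is what the
  // { hashRawText: -1 } index and getSameRawTextCollection rely on.
  return createHash('sha256').update(rawText).digest('hex');
}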
@@ -16,22 +16,22 @@ export async function delDatasetRelevantData({ datasetIds }: { datasetIds: strin
     datasetId: { $in: datasetIds }
   });
 
-  // delete related files
-  await Promise.all(
-    datasetIds.map((id) => delFileByMetadata({ bucketName: BucketNameEnum.dataset, datasetId: id }))
-  );
+  await delay(2000);
 
-  await delay(500);
-
-  // delete pg data
-  await deletePgDataById(`dataset_id IN ('${datasetIds.join("','")}')`);
   // delete dataset.datas
   await MongoDatasetData.deleteMany({ datasetId: { $in: datasetIds } });
+  // delete pg data
+  await deletePgDataById(`dataset_id IN ('${datasetIds.join("','")}')`);
 
   // delete collections
   await MongoDatasetCollection.deleteMany({
     datasetId: { $in: datasetIds }
   });
+
+  // delete related files
+  await Promise.all(
+    datasetIds.map((id) => delFileByMetadata({ bucketName: BucketNameEnum.dataset, datasetId: id }))
+  );
 }
 /**
  * delete all data by collectionIds
@@ -51,6 +51,18 @@ export async function delCollectionRelevantData({
     collectionId: { $in: collectionIds }
   });
 
+  await delay(2000);
+
+  // delete dataset.datas
+  await MongoDatasetData.deleteMany({ collectionId: { $in: collectionIds } });
+  // delete pg data
+  await deletePgDataById(`collection_id IN ('${collectionIds.join("','")}')`);
+
+  // delete collections
+  await MongoDatasetCollection.deleteMany({
+    _id: { $in: collectionIds }
+  });
+
   // delete file and imgs
   await Promise.all([
     delImgByFileIdList(filterFileIds),
@@ -59,13 +71,6 @@ export async function delCollectionRelevantData({
       fileIdList: filterFileIds
     })
   ]);
-
-  await delay(500);
-
-  // delete pg data
-  await deletePgDataById(`collection_id IN ('${collectionIds.join("','")}')`);
-  // delete dataset.datas
-  await MongoDatasetData.deleteMany({ collectionId: { $in: collectionIds } });
 }
 /**
  * delete one data by mongoDataId
@@ -71,7 +71,6 @@ const DatasetDataSchema = new Schema({
     ],
     default: []
   },
-  // metadata
   updateTime: {
     type: Date,
     default: () => new Date()
@@ -89,6 +88,7 @@ try {
   DatasetDataSchema.index({ teamId: 1 });
   DatasetDataSchema.index({ datasetId: 1 });
   DatasetDataSchema.index({ collectionId: 1 });
   DatasetDataSchema.index({ updateTime: -1 });
   // full text index
   DatasetDataSchema.index({ datasetId: 1, fullTextToken: 'text' });
+  DatasetDataSchema.index({ inited: 1 });
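The compound { datasetId: 1, fullTextToken: 'text' } index supports MongoDB text search scoped to a single dataset, and the { inited: 1 } index is added, presumably so rows that still need initialization are easy to find. A hedged sketch (not from this commit) of a scoped text-search query; the MongoDatasetData import path is an assumption:

// Hedged sketch, not part of this commit: text search restricted to one dataset,
// which the { datasetId: 1, fullTextToken: 'text' } index above can serve.
import { MongoDatasetData } from './schema'; // assumed import path

export async function searchDatasetText(datasetId: string, searchText: string, limit = 20) {
  return MongoDatasetData.find(
    { datasetId, $text: { $search: searchText } },
    { score: { $meta: 'textScore' } } // include the relevance score
  )
    .sort({ score: { $meta: 'textScore' } }) // highest relevance first
    .limit(limit)
    .lean();
}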