commit d2d7eac9e0 (parent 84cf6b5658)
Author: Archer
Date: 2023-12-11 15:12:14 +08:00
Committed by: GitHub
105 changed files with 1091 additions and 801 deletions


@@ -54,7 +54,14 @@ const ChatItemSchema = new Schema({
     type: String,
     default: ''
   },
-  userFeedback: {
+  userGoodFeedback: {
     type: String
   },
+  userFeedback: String,
+  userBadFeedback: {
+    type: String
+  },
+  robotBadFeedback: {
+    type: String
+  },
   adminFeedback: {
@@ -77,7 +84,10 @@ try {
   ChatItemSchema.index({ userId: 1 });
   ChatItemSchema.index({ appId: 1 });
   ChatItemSchema.index({ chatId: 1 });
-  ChatItemSchema.index({ userFeedback: 1 });
+  ChatItemSchema.index({ userGoodFeedback: 1 });
+  ChatItemSchema.index({ userBadFeedback: 1 });
+  ChatItemSchema.index({ robotBadFeedback: 1 });
   ChatItemSchema.index({ adminFeedback: 1 });
 } catch (error) {
   console.log(error);
 }
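The single free-text userFeedback field is split into userGoodFeedback, userBadFeedback and robotBadFeedback, each with its own index, while a bare `userFeedback: String` key is kept, presumably so documents written before this change still expose their old value. The snippet below is a hypothetical one-off migration, not part of this commit; it assumes the legacy value corresponds to userBadFeedback, and the model name and connection URI are placeholders.

// Hypothetical migration sketch (not in this commit): fold the legacy
// userFeedback text into the new userBadFeedback field.
// Assumptions: legacy feedback was negative feedback; model name and
// connection string are placeholders.
import mongoose from 'mongoose';

const ChatItem = mongoose.model('chatItem', new mongoose.Schema({}, { strict: false }));

export async function renameLegacyFeedback() {
  await mongoose.connect(process.env.MONGODB_URI || 'mongodb://localhost:27017/fastgpt');

  // $rename only touches documents that still carry the old key,
  // so the script is safe to re-run.
  const res = await ChatItem.updateMany(
    { userFeedback: { $exists: true } },
    { $rename: { userFeedback: 'userBadFeedback' } }
  );
  console.log(`renamed userFeedback on ${res.modifiedCount} chat items`);

  await mongoose.disconnect();
}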


@@ -6,6 +6,8 @@ import type { CreateDatasetCollectionParams } from '@fastgpt/global/core/dataset
 import { MongoDatasetCollection } from './schema';
 export async function createOneCollection({
+  teamId,
+  tmbId,
   name,
   parentId,
   datasetId,
@@ -14,8 +16,8 @@ export async function createOneCollection({
   chunkSize = 0,
   fileId,
   rawLink,
-  teamId,
-  tmbId,
+  qaPrompt,
+  hashRawText,
   metadata = {}
 }: CreateDatasetCollectionParams & { teamId: string; tmbId: string }) {
   const { _id } = await MongoDatasetCollection.create({
@@ -29,6 +31,8 @@ export async function createOneCollection({
     chunkSize,
     fileId,
     rawLink,
+    qaPrompt,
+    hashRawText,
     metadata
   });
@@ -71,3 +75,19 @@ export function createDefaultCollection({
     updateTime: new Date('2099')
   });
 }
+// check same collection
+export const getSameRawTextCollection = async ({
+  datasetId,
+  hashRawText
+}: {
+  datasetId: string;
+  hashRawText?: string;
+}) => {
+  const collection = await MongoDatasetCollection.findOne({
+    datasetId,
+    hashRawText
+  });
+  return collection;
+};
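createOneCollection now takes teamId/tmbId at the top of the destructuring and persists two new fields, qaPrompt and hashRawText, and the new getSameRawTextCollection helper looks up an existing collection in the same dataset whose raw text hashes to the same value, so callers can detect duplicate imports. A minimal sketch of that dedup check under stated assumptions: the hashStr helper (sha256 over the raw text) and the './controller' import path are illustrative, not taken from this commit.

// Sketch: skip re-importing raw text that already exists in the dataset.
// hashStr is an assumed stand-in for however the import pipeline hashes text.
import { createHash } from 'node:crypto';
import { getSameRawTextCollection } from './controller';

const hashStr = (text: string) => createHash('sha256').update(text).digest('hex');

export async function findDuplicateCollection(datasetId: string, rawText: string) {
  const hashRawText = hashStr(rawText);

  // Returns the matching collection document, or null when the text is new.
  return getSameRawTextCollection({ datasetId, hashRawText });
}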


@@ -72,6 +72,12 @@ const DatasetCollectionSchema = new Schema({
   rawLink: {
     type: String
   },
+  qaPrompt: {
+    type: String
+  },
+  hashRawText: {
+    type: String
+  },
   metadata: {
     type: Object,
     default: {}
@@ -82,6 +88,7 @@ try {
   DatasetCollectionSchema.index({ datasetId: 1 });
   DatasetCollectionSchema.index({ datasetId: 1, parentId: 1 });
   DatasetCollectionSchema.index({ updateTime: -1 });
+  DatasetCollectionSchema.index({ hashRawText: -1 });
 } catch (error) {
   console.log(error);
 }
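The collection schema gains the matching optional qaPrompt and hashRawText fields, plus an index on hashRawText so the dedup lookup above does not scan the whole collection. For a single-field index the -1 direction makes no difference to an equality match; a quick way to confirm the plan, assuming the model exported from this file:

// Sketch: inspect the query plan for the dedup lookup.
import { MongoDatasetCollection } from './schema';

export async function explainDedupLookup(datasetId: string, hashRawText: string) {
  // .explain() returns the MongoDB execution plan instead of documents.
  const plan = await MongoDatasetCollection.findOne({ datasetId, hashRawText }).explain(
    'executionStats'
  );
  console.dir(plan, { depth: null });
}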


@@ -16,22 +16,22 @@ export async function delDatasetRelevantData({ datasetIds }: { datasetIds: strin
     datasetId: { $in: datasetIds }
   });
-  // delete related files
-  await Promise.all(
-    datasetIds.map((id) => delFileByMetadata({ bucketName: BucketNameEnum.dataset, datasetId: id }))
-  );
-  await delay(2000);
+  await delay(500);
-  // delete pg data
-  await deletePgDataById(`dataset_id IN ('${datasetIds.join("','")}')`);
   // delete dataset.datas
   await MongoDatasetData.deleteMany({ datasetId: { $in: datasetIds } });
+  // delete pg data
+  await deletePgDataById(`dataset_id IN ('${datasetIds.join("','")}')`);
   // delete collections
   await MongoDatasetCollection.deleteMany({
     datasetId: { $in: datasetIds }
   });
+  // delete related files
+  await Promise.all(
+    datasetIds.map((id) => delFileByMetadata({ bucketName: BucketNameEnum.dataset, datasetId: id }))
+  );
 }
 /**
  * delete all data by collectionIds
@@ -51,6 +51,18 @@ export async function delCollectionRelevantData({
     collectionId: { $in: collectionIds }
   });
   await delay(2000);
+  // delete dataset.datas
+  await MongoDatasetData.deleteMany({ collectionId: { $in: collectionIds } });
+  // delete pg data
+  await deletePgDataById(`collection_id IN ('${collectionIds.join("','")}')`);
+  // delete collections
+  await MongoDatasetCollection.deleteMany({
+    _id: { $in: collectionIds }
+  });
   // delete file and imgs
   await Promise.all([
     delImgByFileIdList(filterFileIds),
@@ -59,13 +71,6 @@ export async function delCollectionRelevantData({
       fileIdList: filterFileIds
     })
   ]);
-  await delay(500);
-  // delete pg data
-  await deletePgDataById(`collection_id IN ('${collectionIds.join("','")}')`);
-  // delete dataset.datas
-  await MongoDatasetData.deleteMany({ collectionId: { $in: collectionIds } });
 }
 /**
  * delete one data by mongoDataId
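Both cleanup paths change their ordering: in the dataset path the pause drops from 2000ms to 500ms and related files are no longer deleted first; in both paths the Mongo data rows and the pg vector rows are removed before the collection documents, the collection-level function now deletes the collection documents themselves, and files/images are handled at the end. The sketch below restates the new dataset-level sequence with placeholder step functions; none of these names are the real FastGPT helpers.

// Sketch of the new dataset-level cleanup order (placeholder steps only).
type Step = () => Promise<unknown>;

const delay = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms));

export async function cleanupDatasetSketch(steps: {
  deleteDataRows: Step; // MongoDatasetData.deleteMany(...)
  deleteVectors: Step; // deletePgDataById(...)
  deleteCollections: Step; // MongoDatasetCollection.deleteMany(...)
  deleteFiles: Step; // delFileByMetadata(...) per dataset id
}) {
  await delay(500); // shortened from 2000ms
  await steps.deleteDataRows(); // mongo data rows first
  await steps.deleteVectors(); // then the pg vector rows
  await steps.deleteCollections(); // then the collection documents
  await steps.deleteFiles(); // raw files and images go last
}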


@@ -71,7 +71,6 @@ const DatasetDataSchema = new Schema({
     ],
     default: []
   },
-  // metadata
   updateTime: {
     type: Date,
     default: () => new Date()
@@ -89,6 +88,7 @@ try {
   DatasetDataSchema.index({ teamId: 1 });
   DatasetDataSchema.index({ datasetId: 1 });
   DatasetDataSchema.index({ collectionId: 1 });
+  DatasetDataSchema.index({ updateTime: -1 });
   // full text index
   DatasetDataSchema.index({ datasetId: 1, fullTextToken: 'text' });
   DatasetDataSchema.index({ inited: 1 });
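The stray // metadata comment goes away and DatasetDataSchema picks up a descending index on updateTime, presumably to keep recency-sorted listings cheap. A minimal query that such an index serves, assuming this file exports MongoDatasetData as it is used elsewhere in the commit:

// Sketch: page through dataset data, newest first.
import { MongoDatasetData } from './schema';

export async function listRecentData(limit = 30) {
  return MongoDatasetData.find({})
    .sort({ updateTime: -1 }) // served by the new { updateTime: -1 } index
    .limit(limit)
    .lean();
}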