perf: vector generate (#1748)

Authored by Archer on 2024-06-12 16:42:46 +08:00, committed by GitHub
parent d0085a23e6
commit 05611df056
2 changed files with 109 additions and 109 deletions


@@ -81,50 +81,61 @@ async function handler(
   });
   // get 10 init dataset.data
-  const arr = new Array(10).fill(0);
-  for await (const _ of arr) {
-    await mongoSessionRun(async (session) => {
-      const data = await MongoDatasetData.findOneAndUpdate(
-        {
-          teamId,
-          datasetId,
-          rebuilding: true
-        },
-        {
-          $unset: {
-            rebuilding: null
-          },
-          updateTime: new Date()
-        },
-        {
-          session
-        }
-      ).select({
-        _id: 1,
-        collectionId: 1
-      });
-
-      if (data) {
-        await MongoDatasetTraining.create(
-          [
-            {
-              teamId,
-              tmbId,
-              datasetId,
-              collectionId: data.collectionId,
-              billId,
-              mode: TrainingModeEnum.chunk,
-              model: vectorModel,
-              q: '1',
-              dataId: data._id
-            }
-          ],
-          {
-            session
-          }
-        );
-      }
-    });
-  }
+  const max = global.systemEnv?.vectorMaxProcess || 10;
+  const arr = new Array(max * 2).fill(0);
+
+  for await (const _ of arr) {
+    try {
+      const hasNext = await mongoSessionRun(async (session) => {
+        // get next dataset.data
+        const data = await MongoDatasetData.findOneAndUpdate(
+          {
+            rebuilding: true,
+            teamId,
+            datasetId
+          },
+          {
+            $unset: {
+              rebuilding: null
+            },
+            updateTime: new Date()
+          },
+          {
+            session
+          }
+        ).select({
+          _id: 1,
+          collectionId: 1
+        });
+
+        if (data) {
+          await MongoDatasetTraining.create(
+            [
+              {
+                teamId,
+                tmbId,
+                datasetId,
+                collectionId: data.collectionId,
+                billId,
+                mode: TrainingModeEnum.chunk,
+                model: vectorModel,
+                q: '1',
+                dataId: data._id
+              }
+            ],
+            {
+              session
+            }
+          );
+        }
+
+        return !!data;
+      });
+
+      if (!hasNext) {
+        break;
+      }
+    } catch (error) {}
+  }
 
   return {};
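
The handler change above leans on MongoDB's atomic findOneAndUpdate: each pass of the loop claims at most one document whose `rebuilding` flag is still set, clears the flag, and enqueues a training record in the same session, so seeding up to `vectorMaxProcess * 2` slots will not enqueue the same data twice. Below is a minimal, self-contained sketch of that claim-and-queue pattern written against plain Mongoose; the demo schemas and the `claimNextRebuildingData` / `seedTrainingQueue` helpers are illustrative assumptions, not the project's actual `mongoSessionRun` helper or models.

```ts
import mongoose, { Schema, model } from 'mongoose';

// Illustrative demo models only -- the real project defines much richer schemas.
const DatasetData = model(
  'demo_dataset_data',
  new Schema({ teamId: String, datasetId: String, rebuilding: Boolean, updateTime: Date })
);
const DatasetTraining = model(
  'demo_dataset_training',
  new Schema({ teamId: String, datasetId: String, dataId: Schema.Types.ObjectId, mode: String })
);

// Atomically claim one document that still needs rebuilding and enqueue it for training.
// Returns false when nothing is left, so the caller can stop its seeding loop early.
async function claimNextRebuildingData(teamId: string, datasetId: string): Promise<boolean> {
  const session = await mongoose.startSession();
  try {
    let claimed = false;
    await session.withTransaction(async () => {
      // findOneAndUpdate reads and clears the `rebuilding` flag in one atomic operation,
      // so concurrent workers cannot claim the same document.
      const data = await DatasetData.findOneAndUpdate(
        { rebuilding: true, teamId, datasetId },
        { $unset: { rebuilding: null }, updateTime: new Date() },
        { session }
      ).select({ _id: 1 });

      if (data) {
        await DatasetTraining.create([{ teamId, datasetId, dataId: data._id, mode: 'chunk' }], {
          session
        });
      }
      claimed = !!data;
    });
    return claimed;
  } finally {
    await session.endSession();
  }
}

// Seed the queue with up to twice the concurrency limit, mirroring `max * 2` in the handler.
export async function seedTrainingQueue(teamId: string, datasetId: string, max = 10) {
  for (let i = 0; i < max * 2; i++) {
    try {
      const hasNext = await claimNextRebuildingData(teamId, datasetId);
      if (!hasNext) break;
    } catch {
      // A transient transaction conflict just costs one slot, as in the handler above.
    }
  }
}
```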


@@ -158,27 +158,69 @@ const rebuildData = async ({
   const deleteVectorIdList = mongoData.indexes.map((index) => index.dataId);
 
-  const { tokens } = await mongoSessionRun(async (session) => {
-    // update vector, update dataset.data rebuilding status, delete data from training
-    const updateResult = await Promise.all(
-      mongoData.indexes.map(async (index, i) => {
-        const result = await insertDatasetDataVector({
-          query: index.text,
-          model: getVectorModel(trainingData.model),
-          teamId: mongoData.teamId,
-          datasetId: mongoData.datasetId,
-          collectionId: mongoData.collectionId
-        });
-        mongoData.indexes[i].dataId = result.insertId;
-        return result;
-      })
-    );
-
-    // Ensure that the training data is deleted after the Mongo update is successful
+  // Find next rebuilding data to insert training queue
+  await mongoSessionRun(async (session) => {
+    // get new mongoData insert to training
+    const newRebuildingData = await MongoDatasetData.findOneAndUpdate(
+      {
+        teamId: mongoData.teamId,
+        datasetId: mongoData.datasetId,
+        rebuilding: true
+      },
+      {
+        $unset: {
+          rebuilding: null
+        },
+        updateTime: new Date()
+      },
+      { session }
+    ).select({
+      _id: 1,
+      collectionId: 1
+    });
+
+    if (newRebuildingData) {
+      await MongoDatasetTraining.create(
+        [
+          {
+            teamId: mongoData.teamId,
+            tmbId: trainingData.tmbId,
+            datasetId: mongoData.datasetId,
+            collectionId: newRebuildingData.collectionId,
+            billId: trainingData.billId,
+            mode: TrainingModeEnum.chunk,
+            model: trainingData.model,
+            q: '1',
+            dataId: newRebuildingData._id
+          }
+        ],
+        { session }
+      );
+    }
+  });
+
+  // update vector, update dataset_data rebuilding status, delete data from training
+  // 1. Insert new vector to dataset_data
+  const updateResult = await Promise.all(
+    mongoData.indexes.map(async (index, i) => {
+      const result = await insertDatasetDataVector({
+        query: index.text,
+        model: getVectorModel(trainingData.model),
+        teamId: mongoData.teamId,
+        datasetId: mongoData.datasetId,
+        collectionId: mongoData.collectionId
+      });
+      mongoData.indexes[i].dataId = result.insertId;
+      return result;
+    })
+  );
+
+  const { tokens } = await mongoSessionRun(async (session) => {
+    // 2. Ensure that the training data is deleted after the Mongo update is successful
     await mongoData.save({ session });
+    // 3. Delete the training data
     await trainingData.deleteOne({ session });
-    // delete old vector
+    // 4. Delete old vector
     await deleteDatasetDataVector({
       teamId: mongoData.teamId,
       idList: deleteVectorIdList
@@ -189,59 +231,6 @@ const rebuildData = async ({
     };
   });
 
-  // find next data insert to training queue
-  const arr = new Array(5).fill(0);
-  for await (const _ of arr) {
-    try {
-      const hasNextData = await mongoSessionRun(async (session) => {
-        // get new mongoData insert to training
-        const newRebuildingData = await MongoDatasetData.findOneAndUpdate(
-          {
-            teamId: mongoData.teamId,
-            datasetId: mongoData.datasetId,
-            rebuilding: true
-          },
-          {
-            $unset: {
-              rebuilding: null
-            },
-            updateTime: new Date()
-          },
-          { session }
-        ).select({
-          _id: 1,
-          collectionId: 1
-        });
-
-        if (newRebuildingData) {
-          await MongoDatasetTraining.create(
-            [
-              {
-                teamId: mongoData.teamId,
-                tmbId: trainingData.tmbId,
-                datasetId: mongoData.datasetId,
-                collectionId: newRebuildingData.collectionId,
-                billId: trainingData.billId,
-                mode: TrainingModeEnum.chunk,
-                model: trainingData.model,
-                q: '1',
-                dataId: newRebuildingData._id
-              }
-            ],
-            { session }
-          );
-        }
-
-        return !!newRebuildingData;
-      });
-
-      if (!hasNextData) {
-        break;
-      }
-    } catch (error) {}
-  }
-
   return { tokens };
 };
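
The reordered `rebuildData` also fixes the sequencing of side effects: the next rebuilding item is queued first, the new vectors are written before anything is deleted, and the old vectors are dropped only after the Mongo document has been repointed at the new ones, alongside the training-record cleanup. A condensed sketch of that ordering follows; the `RebuildableDoc` / `TrainingDoc` shapes and the `vectorStore` parameter are stand-in assumptions for the project's real helpers (`insertDatasetDataVector`, `deleteDatasetDataVector`), and token accounting from the original is omitted for brevity.

```ts
import mongoose, { type ClientSession } from 'mongoose';

// Stand-in types: only the call order matters for this sketch.
interface RebuildableDoc {
  indexes: { text: string; dataId: string }[];
  save(options: { session: ClientSession }): Promise<unknown>;
}
interface TrainingDoc {
  deleteOne(options: { session: ClientSession }): Promise<unknown>;
}
interface VectorStore {
  insert(text: string): Promise<string>; // returns the new vector id
  remove(ids: string[]): Promise<void>;
}

export async function rebuildOne(
  mongoData: RebuildableDoc,
  trainingData: TrainingDoc,
  vectorStore: VectorStore
) {
  const oldVectorIds = mongoData.indexes.map((index) => index.dataId);

  // 1. Write the new vectors first: if this step fails, the old vectors and the
  //    Mongo document are still untouched.
  await Promise.all(
    mongoData.indexes.map(async (index) => {
      index.dataId = await vectorStore.insert(index.text); // in parallel, as in the diff
    })
  );

  // 2-4. Repoint the document, drop the finished training record, and only then
  //      remove the old vectors.
  const session = await mongoose.startSession();
  try {
    await session.withTransaction(async () => {
      await mongoData.save({ session }); // 2. indexes now reference the new vector ids
      await trainingData.deleteOne({ session }); // 3. this item has finished training
      await vectorStore.remove(oldVectorIds); // 4. old vectors are discarded last
    });
  } finally {
    await session.endSession();
  }
}
```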