Files
FastGPT/packages/service/core/dataset/collection/utils.ts
T
Archer 58000324e2 feature: V4.14.3 (#5970)
* feat(marketplace): update plugin/ download count statistic (#5957)

* feat: download count

* feat: update ui

* fix: ui

* chore: update sdk version

* chore: update .env.template

* chore: adjust

* chore: remove console.log

* chore: adjust

* Update projects/marketplace/src/pages/index.tsx

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* Update projects/marketplace/src/pages/index.tsx

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* Update projects/app/src/pages/config/tool/marketplace.tsx

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* fix: update refresh; feat: marketplace download count per hour

---------

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* download

* marketplace code

* fix: ui (#5963)

* feat: support dataset and files as global variables (#5961)

* json & dataset

* file

* fix file var

* fix

* fix init

* remove

* perf: file vars

* fix: file uploading errors (#5969)

* fix: file uploading errors

* fix build

* perf: fileselector ux

* feat: integrate S3 for dataset with compatibility (#5941)

* fix: text split

* remove test

* feat: integrate S3 for dataset with compatibility

* fix: delay s3 files delete timing

* fix: remove imageKeys

* fix: remove parsed images' TTL

* fix: improve codes by pr comments

---------

Co-authored-by: archer <545436317@qq.com>

* remove log

* perf: request limit

* chore: s3 migration script (#5971)

* test

* perf: s3 code

* fix: migration script (#5972)

* perf: s3 move object

* wip: fix s3 bugs (#5976)

* fix: incorrect replace origin logic (#5978)

* fix: add downloadURL (#5980)

* perf: file variable ttl & quick create dataset with temp s3 bucket (#5973)

* perf: file variable ttl & quick create dataset with temp s3 bucket

* fix

* plugin & form input variables (#5979)

* plugin & form input variables

* fix

* docs: 4143.mdx (#5981)

* doc: update 4143.mdx (#5982)

* fix form input file ttl (#5983)

* trans file type (#5986)

* trans file type

* fix

* fix: S3 script early return (#5985)

* fix: S3 script typeof

* fix: truncate large filename to fit S3 name

* perf(permission): add a schema verification for resource permission, tmbId, groupId, orgId should be set at least one of them (#5987)

* fix: version & typo (#5988)

* fix-v4.14.3 (#5991)

* fix: empty alt make replace JWT failed & incorrect image dataset preview url (#5989)

* fix: empty alt make replace JWT failed & incorrect image dataset preview url

* fix: s3 files recovery script

* fix: incorrect chat external url parsing (#5993)

---------

Co-authored-by: Finley Ge <32237950+FinleyGe@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: heheer <heheer@sealos.io>
Co-authored-by: Roy <whoeverimf5@gmail.com>
2025-11-26 20:47:28 +08:00

251 lines
6.7 KiB
TypeScript

import { MongoDatasetCollection } from './schema';
import type { ClientSession } from '../../../common/mongo';
import { MongoDatasetCollectionTags } from '../tag/schema';
import { readFromSecondary } from '../../../common/mongo/utils';
import type { CollectionWithDatasetType } from '@fastgpt/global/core/dataset/type';
import { DatasetCollectionSchemaType } from '@fastgpt/global/core/dataset/type';
import {
DatasetCollectionDataProcessModeEnum,
DatasetCollectionSyncResultEnum,
DatasetCollectionTypeEnum,
DatasetSourceReadTypeEnum,
TrainingModeEnum
} from '@fastgpt/global/core/dataset/constants';
import { DatasetErrEnum } from '@fastgpt/global/common/error/code/dataset';
import { readDatasetSourceRawText } from '../read';
import { hashStr } from '@fastgpt/global/common/string/tools';
import { mongoSessionRun } from '../../../common/mongo/sessionRun';
import { createCollectionAndInsertData, delCollection } from './controller';
import { collectionCanSync } from '@fastgpt/global/core/dataset/collection/utils';
/**
 * Get a collection and all of its descendant collections.
 *
 * Fetches descendants level by level with a single `$in` query per depth
 * level instead of one query per node (the previous recursive version issued
 * N+1 round trips on deep trees). The root collection is always first in the
 * returned array; descendants follow in breadth-first (level) order.
 *
 * @param teamId - team scope for every query
 * @param datasetId - dataset scope for every query
 * @param collectionId - id of the top collection
 * @param fields - mongo projection string (must keep `_id` so children can be chained)
 * @returns `[collection, ...descendants]`
 * @throws rejects with 'Collection not found' when the root id does not exist
 */
export async function findCollectionAndChild({
  teamId,
  datasetId,
  collectionId,
  fields = '_id parentId name metadata'
}: {
  teamId: string;
  datasetId: string;
  collectionId: string;
  fields?: string;
}) {
  // One query per tree level: fetch every child whose parentId is in `ids`.
  const queryLevel = (ids: string[]) =>
    MongoDatasetCollection.find({ teamId, datasetId, parentId: { $in: ids } }, fields).lean();

  async function findDescendants(rootId: string) {
    let level = await queryLevel([rootId]);
    let all = level;
    while (level.length > 0) {
      level = await queryLevel(level.map((item) => String(item._id)));
      all = all.concat(level);
    }
    return all;
  }

  const [collection, childCollections] = await Promise.all([
    MongoDatasetCollection.findById(collectionId, fields).lean(),
    findDescendants(collectionId)
  ]);

  if (!collection) {
    return Promise.reject('Collection not found');
  }

  return [collection, ...childCollections];
}
/**
 * Resolve the update time to store on a collection.
 *
 * An explicit `time` always wins. Collections whose name marks them as
 * manually maintained (Chinese "手动" prefix, or the literal names
 * 'manual' / 'mark') get a far-future timestamp — presumably so they are
 * never considered stale by time-based comparisons (TODO confirm with sync
 * logic). Everything else is stamped with the current time.
 */
export function getCollectionUpdateTime({ name, time }: { time?: Date; name: string }) {
  if (time) return time;

  const isManualCollection = name.startsWith('手动') || name === 'manual' || name === 'mark';
  return isManualCollection ? new Date('2999/9/9') : new Date();
}
/**
 * Resolve a list of tag labels to tag document ids, creating any tags that
 * do not exist yet for this team/dataset.
 *
 * @param tags - tag labels; `undefined` means "no tag update" and short-circuits
 * @param datasetId - dataset the tags belong to
 * @param teamId - owning team
 * @param session - optional mongo session so creation joins the caller's transaction
 * @returns `undefined` when `tags` is undefined, `[]` for an empty list,
 *          otherwise the ids of existing + newly created tag documents
 */
export const createOrGetCollectionTags = async ({
  tags,
  datasetId,
  teamId,
  session
}: {
  tags?: string[];
  datasetId: string;
  teamId: string;
  session?: ClientSession;
}) => {
  if (!tags) return undefined;
  if (tags.length === 0) return [];

  const existingTags = await MongoDatasetCollectionTags.find(
    {
      teamId,
      datasetId,
      tag: { $in: tags }
    },
    undefined,
    { session }
  ).lean();

  // Set membership is O(1) per lookup; the previous Array.includes scan made
  // this filter O(n^2) for large tag lists.
  const existingTagSet = new Set(existingTags.map((tag) => tag.tag));
  const newTagContents = tags.filter((tag) => !existingTagSet.has(tag));

  const newTags = await MongoDatasetCollectionTags.insertMany(
    newTagContents.map((tagContent) => ({
      teamId,
      datasetId,
      tag: tagContent
    })),
    { session, ordered: true }
  );

  return [...existingTags.map((tag) => tag._id), ...newTags.map((tag) => tag._id)];
};
/**
 * Map stored tag ids back to their human-readable labels.
 *
 * Reads the full tag list of the dataset from a secondary replica (labels are
 * for display, so slightly stale data is acceptable) and translates each id.
 * Ids that no longer resolve to a tag are dropped from the result.
 *
 * @returns `undefined` when `tags` is undefined or empty, otherwise the labels
 */
export const collectionTagsToTagLabel = async ({
  datasetId,
  tags
}: {
  datasetId: string;
  tags?: string[];
}) => {
  if (!tags) return undefined;
  if (tags.length === 0) return;

  // Fetch every tag of the dataset once, then resolve ids locally.
  const allTags = await MongoDatasetCollectionTags.find({ datasetId }, undefined, {
    ...readFromSecondary
  }).lean();

  const idToLabel = new Map(
    allTags.map((item): [string, string] => [String(item._id), item.tag])
  );

  // Unknown ids (deleted tags) and empty labels are filtered out.
  return tags.flatMap((id) => {
    const label = idToLabel.get(id);
    return label ? [label] : [];
  });
};
/**
 * Re-sync a collection from its external source (web link or API file).
 *
 * Reads the source again, compares the raw-text hash, and either:
 *  - replaces the whole collection (delete + recreate in one session) when the
 *    content changed,
 *  - renames it in place when only the remote title changed,
 *  - reports `sameRaw` when nothing changed.
 */
export const syncCollection = async (collection: CollectionWithDatasetType) => {
  const dataset = collection.dataset;

  // Only collection types allowed by collectionCanSync can be re-synced.
  if (!collectionCanSync(collection.type)) {
    return Promise.reject(DatasetErrEnum.notSupportSync);
  }

  // Get new text: resolve how to re-read the source — a web link (with an
  // optional page selector from metadata) or an API dataset file id.
  const sourceReadType = await (async () => {
    if (collection.type === DatasetCollectionTypeEnum.link) {
      if (!collection.rawLink) return Promise.reject('rawLink is missing');
      return {
        type: DatasetSourceReadTypeEnum.link,
        sourceId: collection.rawLink,
        selector: collection.metadata?.webPageSelector
      };
    }
    // Non-link syncable collections are read as API dataset files.
    const sourceId = collection.apiFileId;
    if (!sourceId) return Promise.reject('apiFileId is missing');
    return {
      type: DatasetSourceReadTypeEnum.apiFile,
      sourceId,
      apiDatasetServer: dataset.apiDatasetServer
    };
  })();

  const { title, rawText } = await readDatasetSourceRawText({
    teamId: collection.teamId,
    tmbId: collection.tmbId,
    datasetId: collection.datasetId,
    ...sourceReadType
  });

  if (!rawText) {
    return DatasetCollectionSyncResultEnum.failed;
  }

  // Check if the original text is the same: skip if same.
  // NOTE(review): when collection.hashRawText is empty/unset, the
  // content-changed branch is skipped entirely and only the title can be
  // refreshed — confirm this is intended for collections that never stored
  // a hash.
  const hashRawText = hashStr(rawText);
  if (collection.hashRawText && hashRawText !== collection.hashRawText) {
    // Content changed: delete and recreate the collection inside one mongo
    // session so the swap is atomic.
    await mongoSessionRun(async (session) => {
      // Delete old collection (keeps images/files: delImg/delFile are false).
      await delCollection({
        collections: [collection],
        delImg: false,
        delFile: false,
        session
      });
      // Create new collection from the freshly read text, carrying over the
      // old collection's fields; tag ids are mapped back to labels so the
      // create path can re-resolve them.
      await createCollectionAndInsertData({
        session,
        dataset,
        rawText: rawText,
        createCollectionParams: {
          ...collection,
          name: title || collection.name,
          updateTime: new Date(),
          tags: await collectionTagsToTagLabel({
            datasetId: collection.datasetId,
            tags: collection.tags
          })
        }
      });
    });

    return DatasetCollectionSyncResultEnum.success;
  } else if (collection.name !== title) {
    // Same content, different remote title: rename in place.
    await MongoDatasetCollection.updateOne({ _id: collection._id }, { $set: { name: title } });
    return DatasetCollectionSyncResultEnum.success;
  }

  return DatasetCollectionSyncResultEnum.sameRaw;
};
/*
  Pick the training mode for a collection.
  QA: separate process.
  Chunk priority: image index -> auto index -> plain chunk index.
  The imageParse / image / auto modes are gated behind the plus edition
  (global.feConfigs.isPlus); without it everything falls back to chunk.
*/
export const getTrainingModeByCollection = ({
  trainingType,
  autoIndexes,
  imageIndex
}: {
  trainingType: DatasetCollectionDataProcessModeEnum;
  autoIndexes?: boolean;
  imageIndex?: boolean;
}) => {
  const isPlus = !!global.feConfigs?.isPlus;

  if (trainingType === DatasetCollectionDataProcessModeEnum.imageParse) {
    // imageParse requires plus; otherwise fall through to plain chunking.
    return isPlus ? TrainingModeEnum.imageParse : TrainingModeEnum.chunk;
  }

  if (trainingType === DatasetCollectionDataProcessModeEnum.qa) {
    return TrainingModeEnum.qa;
  }

  if (trainingType === DatasetCollectionDataProcessModeEnum.chunk && isPlus) {
    // Image indexing takes precedence over auto indexing.
    if (imageIndex) return TrainingModeEnum.image;
    if (autoIndexes) return TrainingModeEnum.auto;
  }

  return TrainingModeEnum.chunk;
};