Mirror of https://github.com/labring/FastGPT.git (synced 2025-07-26 15:54:11 +00:00)

4.6.7-alpha commit (#743)
Co-authored-by: Archer <545436317@qq.com>
Co-authored-by: heheer <71265218+newfish-cmyk@users.noreply.github.com>
@@ -3,9 +3,10 @@ import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
 import fsp from 'fs/promises';
 import fs from 'fs';
 import { DatasetFileSchema } from '@fastgpt/global/core/dataset/type';
 import { delImgByFileIdList } from '../image/controller';
+import { MongoFileSchema } from './schema';

 export function getGFSCollection(bucket: `${BucketNameEnum}`) {
+  MongoFileSchema;
   return connectionMongo.connection.db.collection(`${bucket}.files`);
 }
 export function getGridBucket(bucket: `${BucketNameEnum}`) {
@@ -21,6 +22,7 @@ export async function uploadFile({
   tmbId,
   path,
   filename,
+  contentType,
   metadata = {}
 }: {
   bucketName: `${BucketNameEnum}`;
@@ -28,6 +30,7 @@ export async function uploadFile({
   tmbId: string;
   path: string;
   filename: string;
+  contentType?: string;
   metadata?: Record<string, any>;
 }) {
   if (!path) return Promise.reject(`filePath is empty`);
@@ -44,7 +47,7 @@ export async function uploadFile({

   const stream = bucket.openUploadStream(filename, {
     metadata,
-    contentType: metadata?.contentType
+    contentType
   });

   // save to gridfs
@@ -96,40 +99,6 @@ export async function delFileByFileIdList({
     }
   }
 }
-// delete file by metadata(datasetId)
-export async function delFileByMetadata({
-  bucketName,
-  datasetId
-}: {
-  bucketName: `${BucketNameEnum}`;
-  datasetId?: string;
-}) {
-  const bucket = getGridBucket(bucketName);
-
-  const files = await bucket
-    .find(
-      {
-        ...(datasetId && { 'metadata.datasetId': datasetId })
-      },
-      {
-        projection: {
-          _id: 1
-        }
-      }
-    )
-    .toArray();
-
-  const idList = files.map((item) => String(item._id));
-
-  // delete img
-  await delImgByFileIdList(idList);
-
-  // delete file
-  await delFileByFileIdList({
-    bucketName,
-    fileIdList: idList
-  });
-}

 export async function getDownloadStream({
   bucketName,
packages/service/common/file/gridfs/schema.ts — new file (+15 lines)
@@ -0,0 +1,15 @@
+import { connectionMongo, type Model } from '../../mongo';
+const { Schema, model, models } = connectionMongo;
+
+const FileSchema = new Schema({});
+
+try {
+  FileSchema.index({ 'metadata.teamId': 1 });
+  FileSchema.index({ 'metadata.uploadDate': -1 });
+} catch (error) {
+  console.log(error);
+}
+
+export const MongoFileSchema = models['dataset.files'] || model('dataset.files', FileSchema);
+
+MongoFileSchema.syncIndexes();
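The bare `MongoFileSchema;` expression added to `getGFSCollection` above looks like a no-op, but it appears intended to force this new schema module to be imported and evaluated, so `model('dataset.files', FileSchema)` and `syncIndexes()` run before the raw GridFS collection is queried. A minimal sketch of the pattern (restating the diff's own code, with the bucket parameter loosened to `string`):

```ts
// Sketch of the side-effect-reference pattern used above.
import { MongoFileSchema } from './schema';
import { connectionMongo } from '../../mongo';

export function getGFSCollection(bucket: string) {
  MongoFileSchema; // touching the import guarantees model registration + index sync ran
  return connectionMongo.connection.db.collection(`${bucket}.files`);
}
```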
@@ -46,8 +46,8 @@ export async function readMongoImg({ id }: { id: string }) {
   return data?.binary;
 }

-export async function delImgByFileIdList(fileIds: string[]) {
+export async function delImgByRelatedId(relateIds: string[]) {
   return MongoImage.deleteMany({
-    'metadata.fileId': { $in: fileIds.map((item) => String(item)) }
+    'metadata.relatedId': { $in: relateIds.map((id) => String(id)) }
   });
 }

@@ -35,6 +35,8 @@ try {
   ImageSchema.index({ expiredTime: 1 }, { expireAfterSeconds: 60 });
   ImageSchema.index({ type: 1 });
   ImageSchema.index({ teamId: 1 });
+  ImageSchema.index({ createTime: 1 });
+  ImageSchema.index({ 'metadata.relatedId': 1 });
 } catch (error) {
   console.log(error);
 }
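The rename from `delImgByFileIdList` to `delImgByRelatedId` switches the lookup key from `metadata.fileId` to `metadata.relatedId`, which the new `ImageSchema.index({ 'metadata.relatedId': 1 })` supports. A hedged caller sketch, mirroring how the collection-deletion code later in this commit gathers `relatedImgId` values:

```ts
// Hypothetical caller: remove all images linked to a batch of collections.
// Assumes each collection stores its image link in metadata.relatedImgId,
// matching metadata.relatedId on the image documents (per this commit).
import { delImgByRelatedId } from './controller';

async function cleanupCollectionImages(
  collections: { metadata?: { relatedImgId?: string } }[]
) {
  const relatedIds = collections
    .map((c) => c.metadata?.relatedImgId || '')
    .filter(Boolean);
  await delImgByRelatedId(relatedIds); // deleteMany on 'metadata.relatedId' $in relatedIds
}
```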
@@ -1,68 +0,0 @@
-import * as pdfjs from 'pdfjs-dist/legacy/build/pdf.mjs';
-// @ts-ignore
-import('pdfjs-dist/legacy/build/pdf.worker.min.mjs');
-import { ReadFileParams } from './type';
-
-type TokenType = {
-  str: string;
-  dir: string;
-  width: number;
-  height: number;
-  transform: number[];
-  fontName: string;
-  hasEOL: boolean;
-};
-
-export const readPdfFile = async ({ path }: ReadFileParams) => {
-  const readPDFPage = async (doc: any, pageNo: number) => {
-    const page = await doc.getPage(pageNo);
-    const tokenizedText = await page.getTextContent();
-
-    const viewport = page.getViewport({ scale: 1 });
-    const pageHeight = viewport.height;
-    const headerThreshold = pageHeight * 0.95;
-    const footerThreshold = pageHeight * 0.05;
-
-    const pageTexts: TokenType[] = tokenizedText.items.filter((token: TokenType) => {
-      return (
-        !token.transform ||
-        (token.transform[5] < headerThreshold && token.transform[5] > footerThreshold)
-      );
-    });
-
-    // concat empty string 'hasEOL'
-    for (let i = 0; i < pageTexts.length; i++) {
-      const item = pageTexts[i];
-      if (item.str === '' && pageTexts[i - 1]) {
-        pageTexts[i - 1].hasEOL = item.hasEOL;
-        pageTexts.splice(i, 1);
-        i--;
-      }
-    }
-
-    page.cleanup();
-
-    return pageTexts
-      .map((token) => {
-        const paragraphEnd = token.hasEOL && /([。?!.?!\n\r]|(\r\n))$/.test(token.str);
-
-        return paragraphEnd ? `${token.str}\n` : token.str;
-      })
-      .join('');
-  };
-
-  const loadingTask = pdfjs.getDocument(path);
-  const doc = await loadingTask.promise;
-
-  const pageTextPromises = [];
-  for (let pageNo = 1; pageNo <= doc.numPages; pageNo++) {
-    pageTextPromises.push(readPDFPage(doc, pageNo));
-  }
-  const pageTexts = await Promise.all(pageTextPromises);
-
-  loadingTask.destroy();
-
-  return {
-    rawText: pageTexts.join('')
-  };
-};
packages/service/common/file/load/type.d.ts — deleted (vendored, -18 lines)
@@ -1,18 +0,0 @@
-export type ReadFileParams = {
-  preview: boolean;
-  teamId: string;
-  path: string;
-  metadata?: Record<string, any>;
-};
-
-export type ReadFileResponse = {
-  rawText: string;
-};
-
-export type ReadFileBufferItemType = ReadFileParams & {
-  rawText: string;
-};
-
-declare global {
-  var readFileBuffers: ReadFileBufferItemType[];
-}
@@ -1,50 +0,0 @@
-import { readPdfFile } from './pdf';
-import { readDocFle } from './word';
-import { ReadFileBufferItemType, ReadFileParams } from './type';
-
-global.readFileBuffers = global.readFileBuffers || [];
-
-const bufferMaxSize = 200;
-
-export const pushFileReadBuffer = (params: ReadFileBufferItemType) => {
-  global.readFileBuffers.push(params);
-
-  if (global.readFileBuffers.length > bufferMaxSize) {
-    global.readFileBuffers.shift();
-  }
-};
-export const getReadFileBuffer = ({ path, teamId }: ReadFileParams) =>
-  global.readFileBuffers.find((item) => item.path === path && item.teamId === teamId);
-
-export const readFileContent = async (params: ReadFileParams) => {
-  const { path } = params;
-
-  const buffer = getReadFileBuffer(params);
-
-  if (buffer) {
-    return buffer;
-  }
-
-  const extension = path?.split('.')?.pop()?.toLowerCase() || '';
-
-  const { rawText } = await (async () => {
-    switch (extension) {
-      case 'pdf':
-        return readPdfFile(params);
-      case 'docx':
-        return readDocFle(params);
-      default:
-        return Promise.reject('Only support .pdf, .docx');
-    }
-  })();
-
-  pushFileReadBuffer({
-    ...params,
-    rawText
-  });
-
-  return {
-    ...params,
-    rawText
-  };
-};
@@ -1,22 +0,0 @@
-import mammoth from 'mammoth';
-import { htmlToMarkdown } from '../../string/markdown';
-import { ReadFileParams } from './type';
-/**
- * read docx to markdown
- */
-export const readDocFle = async ({ path, metadata = {} }: ReadFileParams) => {
-  try {
-    const { value: html } = await mammoth.convertToHtml({
-      path
-    });
-
-    const md = await htmlToMarkdown(html);
-
-    return {
-      rawText: md
-    };
-  } catch (error) {
-    console.log('error doc read:', error);
-    return Promise.reject('Can not read doc file, please convert to PDF');
-  }
-};
@@ -3,7 +3,6 @@ import multer from 'multer';
 import path from 'path';
 import { BucketNameEnum, bucketNameMap } from '@fastgpt/global/common/file/constants';
 import { getNanoid } from '@fastgpt/global/common/string/tools';
 import { tmpFileDirPath } from './constants';

 type FileType = {
   fieldname: string;
@@ -15,8 +14,6 @@ type FileType = {
   size: number;
 };

-const expiredTime = 30 * 60 * 1000;
-
 export const getUploadModel = ({ maxSize = 500 }: { maxSize?: number }) => {
   maxSize *= 1024 * 1024;
   class UploadModel {
@@ -31,15 +28,16 @@ export const getUploadModel = ({ maxSize = 500 }: { maxSize?: number }) => {
       // },
       filename: async (req, file, cb) => {
         const { ext } = path.parse(decodeURIComponent(file.originalname));
-        cb(null, `${Date.now() + expiredTime}-${getNanoid(32)}${ext}`);
+        cb(null, `${getNanoid(32)}${ext}`);
       }
     })
-  }).any();
+  }).single('file');

   async doUpload<T = Record<string, any>>(req: NextApiRequest, res: NextApiResponse) {
     return new Promise<{
-      files: FileType[];
-      metadata: T;
+      file: FileType;
+      metadata: Record<string, any>;
+      data: T;
       bucketName?: `${BucketNameEnum}`;
     }>((resolve, reject) => {
       // @ts-ignore
@@ -54,20 +52,28 @@ export const getUploadModel = ({ maxSize = 500 }: { maxSize?: number }) => {
         return reject('BucketName is invalid');
       }

+      // @ts-ignore
+      const file = req.file as FileType;
+
       resolve({
-        ...req.body,
-        files:
-          // @ts-ignore
-          req.files?.map((file) => ({
-            ...file,
-            originalname: decodeURIComponent(file.originalname)
-          })) || [],
+        file: {
+          ...file,
+          originalname: decodeURIComponent(file.originalname)
+        },
         bucketName,
+        metadata: (() => {
+          if (!req.body?.metadata) return {};
+          try {
+            return JSON.parse(req.body.metadata);
+          } catch (error) {
+            console.log(error);
+            return {};
+          }
+        })(),
+        data: (() => {
+          if (!req.body?.data) return {};
+          try {
+            return JSON.parse(req.body.data);
+          } catch (error) {
+            return {};
+          }
+        })()
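With `.any()` replaced by `.single('file')`, `doUpload` now resolves exactly one `file` plus `metadata` and `data` objects parsed (with fallbacks to `{}`) from the multipart text fields. A hedged API-route sketch — the import path and the assumption that `getUploadModel` returns an `UploadModel` instance are mine, not confirmed by the diff:

```ts
// Hypothetical Next.js API route using the reworked doUpload.
import type { NextApiRequest, NextApiResponse } from 'next';
import { getUploadModel } from '@fastgpt/service/common/file/multer'; // path assumed

const upload = getUploadModel({ maxSize: 500 });

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  // Resolves { file, metadata, data, bucketName }; metadata/data are
  // JSON.parse'd from multipart text fields, falling back to {}.
  const { file, metadata, bucketName } = await upload.doUpload(req, res);
  res.json({ filename: file.originalname, bucketName, metadata });
}
```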
@@ -1,5 +1,4 @@
 import fs from 'fs';
-import { tmpFileDirPath } from './constants';

 export const removeFilesByPaths = (paths: string[]) => {
   paths.forEach((path) => {
@@ -10,24 +9,3 @@ export const removeFilesByPaths = (paths: string[]) => {
     });
   });
 };
-
-/* cron job. check expired tmp files */
-export const checkExpiredTmpFiles = () => {
-  // get all file name
-  const files = fs.readdirSync(tmpFileDirPath).map((name) => {
-    const timestampStr = name.split('-')[0];
-    const expiredTimestamp = timestampStr ? Number(timestampStr) : 0;
-
-    return {
-      filename: name,
-      expiredTimestamp,
-      path: `${tmpFileDirPath}/${name}`
-    };
-  });
-
-  // count expiredFiles
-  const expiredFiles = files.filter((item) => item.expiredTimestamp < Date.now());
-
-  // remove expiredFiles
-  removeFilesByPaths(expiredFiles.map((item) => item.path));
-};
@@ -64,41 +64,39 @@ export const urlsFetch = async ({
 }: UrlFetchParams): Promise<UrlFetchResponse> => {
   urlList = urlList.filter((url) => /^(http|https):\/\/[^ "]+$/.test(url));

-  const response = (
-    await Promise.all(
-      urlList.map(async (url) => {
-        try {
-          const fetchRes = await axios.get(url, {
-            timeout: 30000
-          });
+  const response = await Promise.all(
+    urlList.map(async (url) => {
+      try {
+        const fetchRes = await axios.get(url, {
+          timeout: 30000
+        });

-          const $ = cheerio.load(fetchRes.data);
-          const { title, html, usedSelector } = cheerioToHtml({
-            fetchUrl: url,
-            $,
-            selector
-          });
-          const md = await htmlToMarkdown(html);
+        const $ = cheerio.load(fetchRes.data);
+        const { title, html, usedSelector } = cheerioToHtml({
+          fetchUrl: url,
+          $,
+          selector
+        });
+        const md = await htmlToMarkdown(html);

-          return {
-            url,
-            title,
-            content: md,
-            selector: usedSelector
-          };
-        } catch (error) {
-          console.log(error, 'fetch error');
-          return {
-            url,
-            title: '',
-            content: '',
-            selector: ''
-          };
-        }
-      })
-    )
-  ).filter((item) => item.content);
+        return {
+          url,
+          title,
+          content: md,
+          selector: usedSelector
+        };
+      } catch (error) {
+        console.log(error, 'fetch error');
+
+        return {
+          url,
+          title: '',
+          content: '',
+          selector: ''
+        };
+      }
+    })
+  );

   return response;
 };
@@ -1,21 +1,19 @@
 export type DeleteDatasetVectorProps = {
+  teamId: string;
+
   id?: string;
   datasetIds?: string[];
   collectionIds?: string[];
-
-  collectionId?: string;
-  dataIds?: string[];
+  idList?: string[];
 };

 export type InsertVectorProps = {
   teamId: string;
   tmbId: string;
   datasetId: string;
   collectionId: string;
   dataId: string;
 };

 export type EmbeddingRecallProps = {
-  similarity?: number;
   datasetIds: string[];
+  similarity?: number;
 };
@@ -10,6 +10,7 @@ const getVectorObj = () => {
 export const initVectorStore = getVectorObj().init;
 export const deleteDatasetDataVector = getVectorObj().delete;
 export const recallFromVectorStore = getVectorObj().recall;
+export const checkVectorDataExist = getVectorObj().checkDataExist;
 export const getVectorDataByTime = getVectorObj().getVectorDataByTime;
 export const getVectorCountByTeamId = getVectorObj().getVectorCountByTeamId;

@@ -21,7 +22,7 @@ export const insertDatasetDataVector = async ({
   query: string;
   model: string;
 }) => {
-  const { vectors, tokens } = await getVectorsByText({
+  const { vectors, charsLength } = await getVectorsByText({
     model,
     input: query
   });
@@ -31,32 +32,27 @@ export const insertDatasetDataVector = async ({
   });

   return {
-    tokens,
+    charsLength,
     insertId
   };
 };

 export const updateDatasetDataVector = async ({
   id,
-  query,
-  model
-}: {
+  ...props
+}: InsertVectorProps & {
   id: string;
   query: string;
   model: string;
 }) => {
-  // get vector
-  const { vectors, tokens } = await getVectorsByText({
-    model,
-    input: query
-  });
+  // insert new vector
+  const { charsLength, insertId } = await insertDatasetDataVector(props);

-  await getVectorObj().update({
-    id,
-    vectors
-  });
-
-  return {
-    tokens
-  };
+  // delete old vector
+  await deleteDatasetDataVector({
+    teamId: props.teamId,
+    id
+  });
+
+  return { charsLength, insertId };
 };
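After this change, `updateDatasetDataVector` no longer mutates a row in place: it inserts a fresh vector and then deletes the old row (scoped to `teamId`), so the vector id changes on every update. A hedged call-site sketch — the relative import paths are assumptions, the signatures come from this diff:

```ts
// After this commit, "update" = insert new row + delete old row, so callers
// must persist the returned insertId in place of the old vector id.
import { updateDatasetDataVector } from '../../common/vectorStore/controller'; // path assumed
import type { InsertVectorProps } from '../../common/vectorStore/controller.d'; // path assumed

async function refreshVector(
  oldVectorId: string,
  insertProps: InsertVectorProps,
  text: string,
  model: string
) {
  const { insertId, charsLength } = await updateDatasetDataVector({
    id: oldVectorId,
    ...insertProps,
    query: text,
    model
  });
  return { newVectorId: insertId, charsLength }; // write newVectorId back to the data index
}
```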
@@ -1,20 +1,20 @@
 import {
   initPg,
   insertDatasetDataVector,
-  updateDatasetDataVector,
   deleteDatasetDataVector,
   embeddingRecall,
   getVectorDataByTime,
-  getVectorCountByTeamId
+  getVectorCountByTeamId,
+  checkDataExist
 } from './controller';

 export class PgVector {
   constructor() {}
   init = initPg;
   insert = insertDatasetDataVector;
-  update = updateDatasetDataVector;
   delete = deleteDatasetDataVector;
   recall = embeddingRecall;
+  checkDataExist = checkDataExist;
   getVectorCountByTeamId = getVectorCountByTeamId;
   getVectorDataByTime = getVectorDataByTime;
 }
@@ -4,7 +4,7 @@ import { delay } from '@fastgpt/global/common/system/utils';
 import { PgClient, connectPg } from './index';
 import { PgSearchRawType } from '@fastgpt/global/core/dataset/api';
 import { EmbeddingRecallItemType } from '../type';
-import { DeleteDatasetVectorProps, EmbeddingRecallProps } from '../controller.d';
+import { DeleteDatasetVectorProps, EmbeddingRecallProps, InsertVectorProps } from '../controller.d';
 import dayjs from 'dayjs';

 export async function initPg() {
@@ -16,11 +16,9 @@ export async function initPg() {
         id BIGSERIAL PRIMARY KEY,
         vector VECTOR(1536) NOT NULL,
         team_id VARCHAR(50) NOT NULL,
-        tmb_id VARCHAR(50) NOT NULL,
         dataset_id VARCHAR(50) NOT NULL,
         collection_id VARCHAR(50) NOT NULL,
-        data_id VARCHAR(50) NOT NULL,
-        createTime TIMESTAMP DEFAULT CURRENT_TIMESTAMP
+        createtime TIMESTAMP DEFAULT CURRENT_TIMESTAMP
       );
     `);

@@ -34,26 +32,21 @@ export async function initPg() {
   }
 }

-export const insertDatasetDataVector = async (props: {
-  teamId: string;
-  tmbId: string;
-  datasetId: string;
-  collectionId: string;
-  dataId: string;
-  vectors: number[][];
-  retry?: number;
-}): Promise<{ insertId: string }> => {
-  const { dataId, teamId, tmbId, datasetId, collectionId, vectors, retry = 3 } = props;
+export const insertDatasetDataVector = async (
+  props: InsertVectorProps & {
+    vectors: number[][];
+    retry?: number;
+  }
+): Promise<{ insertId: string }> => {
+  const { teamId, datasetId, collectionId, vectors, retry = 3 } = props;
   try {
     const { rows } = await PgClient.insert(PgDatasetTableName, {
       values: [
         [
           { key: 'vector', value: `[${vectors[0]}]` },
           { key: 'team_id', value: String(teamId) },
-          { key: 'tmb_id', value: String(tmbId) },
           { key: 'dataset_id', value: datasetId },
-          { key: 'collection_id', value: collectionId },
-          { key: 'data_id', value: String(dataId) }
+          { key: 'collection_id', value: collectionId }
         ]
       ]
     });
@@ -72,48 +65,33 @@ export const insertDatasetDataVector = async (props: {
   }
 };

-export const updateDatasetDataVector = async (props: {
-  id: string;
-  vectors: number[][];
-  retry?: number;
-}): Promise<void> => {
-  const { id, vectors, retry = 2 } = props;
-  try {
-    // update pg
-    await PgClient.update(PgDatasetTableName, {
-      where: [['id', id]],
-      values: [{ key: 'vector', value: `[${vectors[0]}]` }]
-    });
-  } catch (error) {
-    if (retry <= 0) {
-      return Promise.reject(error);
-    }
-    await delay(500);
-    return updateDatasetDataVector({
-      ...props,
-      retry: retry - 1
-    });
-  }
-};
-
 export const deleteDatasetDataVector = async (
   props: DeleteDatasetVectorProps & {
     retry?: number;
   }
 ): Promise<any> => {
-  const { id, datasetIds, collectionIds, collectionId, dataIds, retry = 2 } = props;
+  const { teamId, id, datasetIds, collectionIds, idList, retry = 2 } = props;
+
+  const teamIdWhere = `team_id='${String(teamId)}' AND`;

   const where = await (() => {
-    if (id) return `id=${id}`;
-    if (datasetIds) return `dataset_id IN (${datasetIds.map((id) => `'${String(id)}'`).join(',')})`;
-    if (collectionIds) {
-      return `collection_id IN (${collectionIds.map((id) => `'${String(id)}'`).join(',')})`;
-    }
-    if (collectionId && dataIds) {
-      return `collection_id='${String(collectionId)}' and data_id IN (${dataIds
+    if (id) return `${teamIdWhere} id=${id}`;
+
+    if (datasetIds) {
+      return `${teamIdWhere} dataset_id IN (${datasetIds
+        .map((id) => `'${String(id)}'`)
+        .join(',')})`;
+    }
+
+    if (collectionIds) {
+      return `${teamIdWhere} collection_id IN (${collectionIds
+        .map((id) => `'${String(id)}'`)
+        .join(',')})`;
+    }
+
+    if (idList) {
+      return `${teamIdWhere} id IN (${idList.map((id) => `'${String(id)}'`).join(',')})`;
+    }
     return Promise.reject('deleteDatasetData: no where');
   })();
@@ -142,13 +120,13 @@ export const embeddingRecall = async (
 ): Promise<{
   results: EmbeddingRecallItemType[];
 }> => {
-  const { vectors, limit, similarity = 0, datasetIds, retry = 2 } = props;
+  const { datasetIds, vectors, limit, similarity = 0, retry = 2 } = props;

   try {
     const results: any = await PgClient.query(
       `BEGIN;
       SET LOCAL hnsw.ef_search = ${global.systemEnv.pgHNSWEfSearch || 100};
-      select id, collection_id, data_id, (vector <#> '[${vectors[0]}]') * -1 AS score
+      select id, collection_id, (vector <#> '[${vectors[0]}]') * -1 AS score
       from ${PgDatasetTableName}
       where dataset_id IN (${datasetIds.map((id) => `'${String(id)}'`).join(',')})
       AND vector <#> '[${vectors[0]}]' < -${similarity}
@@ -158,21 +136,10 @@ export const embeddingRecall = async (

     const rows = results?.[2]?.rows as PgSearchRawType[];

-    // concat same data_id
-    const filterRows: PgSearchRawType[] = [];
-    let set = new Set<string>();
-    for (const row of rows) {
-      if (!set.has(row.data_id)) {
-        filterRows.push(row);
-        set.add(row.data_id);
-      }
-    }
-
     return {
-      results: filterRows.map((item) => ({
+      results: rows.map((item) => ({
         id: item.id,
         collectionId: item.collection_id,
-        dataId: item.data_id,
         score: item.score
       }))
     };
@@ -184,7 +151,11 @@ export const embeddingRecall = async (
   }
 };

-// bill
+export const checkDataExist = async (id: string) => {
+  const { rows } = await PgClient.query(`SELECT id FROM ${PgDatasetTableName} WHERE id=${id};`);
+
+  return rows.length > 0;
+};
 export const getVectorCountByTeamId = async (teamId: string) => {
   const total = await PgClient.count(PgDatasetTableName, {
     where: [['team_id', String(teamId)]]
@@ -193,15 +164,20 @@ export const getVectorCountByTeamId = async (teamId: string) => {
   return total;
 };
 export const getVectorDataByTime = async (start: Date, end: Date) => {
-  const { rows } = await PgClient.query<{ id: string; data_id: string }>(`SELECT id, data_id
+  const { rows } = await PgClient.query<{
+    id: string;
+    team_id: string;
+    dataset_id: string;
+  }>(`SELECT id, team_id, dataset_id
   FROM ${PgDatasetTableName}
-  WHERE createTime BETWEEN '${dayjs(start).format('YYYY-MM-DD')}' AND '${dayjs(end).format(
-    'YYYY-MM-DD 23:59:59'
+  WHERE createtime BETWEEN '${dayjs(start).format('YYYY-MM-DD HH:mm:ss')}' AND '${dayjs(end).format(
+    'YYYY-MM-DD HH:mm:ss'
   )}';
   `);

   return rows.map((item) => ({
     id: item.id,
-    dataId: item.data_id
+    datasetId: item.dataset_id,
+    teamId: item.team_id
   }));
 };
@@ -7,6 +7,5 @@ declare global {
 export type EmbeddingRecallItemType = {
   id: string;
   collectionId: string;
-  dataId: string;
   score: number;
 };
@@ -18,10 +18,9 @@ export async function getVectorsByText({
   }

   try {
-    // get chatAPI
     const ai = getAIApi();

-    // convert input content to vectors
+    // input text to vector
     const result = await ai.embeddings
       .create({
         model,
@@ -38,7 +37,7 @@ export async function getVectorsByText({
       }

       return {
-        tokens: res.usage.total_tokens || 0,
+        charsLength: input.length,
         vectors: await Promise.all(res.data.map((item) => unityDimensional(item.embedding)))
       };
     });
@@ -53,7 +52,9 @@ export async function getVectorsByText({

 function unityDimensional(vector: number[]) {
   if (vector.length > 1536) {
-    console.log(`当前向量维度为: ${vector.length}, 向量维度不能超过 1536, 已自动截取前 1536 维度`);
+    console.log(
+      `The current vector dimension is ${vector.length}, and the vector dimension cannot exceed 1536. The first 1536 dimensions are automatically captured`
+    );
     return vector.slice(0, 1536);
   }
   let resultVector = vector;
@@ -11,6 +11,8 @@ import { appCollectionName } from '../app/schema';
 import { userCollectionName } from '../../support/user/schema';
 import { ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';

+export const ChatItemCollectionName = 'chatitems';
+
 const ChatItemSchema = new Schema({
   teamId: {
     type: Schema.Types.ObjectId,
@@ -79,20 +81,23 @@ const ChatItemSchema = new Schema({
 });

 try {
-  ChatItemSchema.index({ teamId: 1 });
-  ChatItemSchema.index({ time: -1 });
-  ChatItemSchema.index({ appId: 1 });
-  ChatItemSchema.index({ chatId: 1 });
-  ChatItemSchema.index({ obj: 1 });
-  ChatItemSchema.index({ userGoodFeedback: 1 });
-  ChatItemSchema.index({ userBadFeedback: 1 });
-  ChatItemSchema.index({ customFeedbacks: 1 });
-  ChatItemSchema.index({ adminFeedback: 1 });
+  ChatItemSchema.index({ dataId: 1 }, { background: true });
+  /* delete by app;
+     delete by chat id;
+     get chat list;
+     get chat logs;
+     close custom feedback;
+  */
+  ChatItemSchema.index({ appId: 1, chatId: 1, dataId: 1 }, { background: true });
+  ChatItemSchema.index({ userGoodFeedback: 1 }, { background: true });
+  ChatItemSchema.index({ userBadFeedback: 1 }, { background: true });
+  ChatItemSchema.index({ customFeedbacks: 1 }, { background: true });
+  ChatItemSchema.index({ adminFeedback: 1 }, { background: true });
 } catch (error) {
   console.log(error);
 }

 export const MongoChatItem: Model<ChatItemType> =
-  models['chatItem'] || model('chatItem', ChatItemSchema);
+  models[ChatItemCollectionName] || model(ChatItemCollectionName, ChatItemSchema);

 MongoChatItem.syncIndexes();
@@ -1,13 +1,12 @@
 import { connectionMongo, type Model } from '../../common/mongo';
 const { Schema, model, models } = connectionMongo;
 import { ChatSchema as ChatType } from '@fastgpt/global/core/chat/type.d';
-import { ChatRoleMap, ChatSourceMap } from '@fastgpt/global/core/chat/constants';
+import { ChatSourceMap } from '@fastgpt/global/core/chat/constants';
 import {
   TeamCollectionName,
   TeamMemberCollectionName
 } from '@fastgpt/global/support/user/team/constant';
 import { appCollectionName } from '../app/schema';
 import { ModuleOutputKeyEnum } from '@fastgpt/global/core/module/constants';

 export const chatCollectionName = 'chat';

@@ -48,7 +47,8 @@ const ChatSchema = new Schema({
     default: ''
   },
   top: {
-    type: Boolean
+    type: Boolean,
+    default: false
   },
   source: {
     type: String,
@@ -73,10 +73,16 @@ const ChatSchema = new Schema({
 });

 try {
-  ChatSchema.index({ appId: 1 });
-  ChatSchema.index({ tmbId: 1 });
-  ChatSchema.index({ shareId: 1 });
-  ChatSchema.index({ updateTime: -1 });
+  ChatSchema.index({ chatId: 1 }, { background: true });
+  // get user history
+  ChatSchema.index({ tmbId: 1, appId: 1, top: -1, updateTime: -1 }, { background: true });
+  // delete by appid; clear history; init chat; update chat; auth chat;
+  ChatSchema.index({ appId: 1, chatId: 1 }, { background: true });
+
+  // get chat logs;
+  ChatSchema.index({ teamId: 1, appId: 1, updateTime: -1 }, { background: true });
+  // get share chat history
+  ChatSchema.index({ shareId: 1, outLinkUid: 1 }, { background: true });
 } catch (error) {
   console.log(error);
 }
@@ -3,10 +3,12 @@ import { MongoChatItem } from './chatItemSchema';
 import { addLog } from '../../common/system/log';

 export async function getChatItems({
+  appId,
   chatId,
   limit = 30,
   field
 }: {
+  appId: string;
   chatId?: string;
   limit?: number;
   field: string;
@@ -15,7 +17,10 @@ export async function getChatItems({
     return { history: [] };
   }

-  const history = await MongoChatItem.find({ chatId }, field).sort({ _id: -1 }).limit(limit).lean();
+  const history = await MongoChatItem.find({ appId, chatId }, field)
+    .sort({ _id: -1 })
+    .limit(limit)
+    .lean();

   history.reverse();

@@ -23,10 +28,12 @@ export async function getChatItems({
 }

 export const addCustomFeedbacks = async ({
+  appId,
   chatId,
   chatItemId,
   feedbacks
 }: {
+  appId: string;
   chatId?: string;
   chatItemId?: string;
   feedbacks: string[];
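Adding `appId` to the filter pairs with the new compound index `{ appId: 1, chatId: 1, dataId: 1 }` on chat items: `{ appId, chatId }` is a prefix of that index, so the old single-field `{ chatId: 1 }` index can be dropped. A minimal sketch (the `'obj value'` field projection is an assumption, not from this diff):

```ts
import { MongoChatItem } from './chatItemSchema';

// { appId, chatId } is a prefix of the compound index
// { appId: 1, chatId: 1, dataId: 1 }, so no dedicated chatId index is needed.
async function loadRecentHistory(appId: string, chatId: string, limit = 30) {
  const history = await MongoChatItem.find({ appId, chatId }, 'obj value')
    .sort({ _id: -1 })
    .limit(limit)
    .lean();
  return history.reverse(); // oldest first for prompt assembly
}
```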
@@ -1,6 +1,20 @@
-import { TrainingModeEnum, DatasetCollectionTypeEnum } from '@fastgpt/global/core/dataset/constant';
+import {
+  TrainingModeEnum,
+  DatasetCollectionTypeEnum
+} from '@fastgpt/global/core/dataset/constants';
 import type { CreateDatasetCollectionParams } from '@fastgpt/global/core/dataset/api.d';
 import { MongoDatasetCollection } from './schema';
+import {
+  CollectionWithDatasetType,
+  DatasetCollectionSchemaType
+} from '@fastgpt/global/core/dataset/type';
+import { MongoDatasetTraining } from '../training/schema';
+import { delay } from '@fastgpt/global/common/system/utils';
+import { MongoDatasetData } from '../data/schema';
+import { delImgByRelatedId } from '../../../common/file/image/controller';
+import { deleteDatasetDataVector } from '../../../common/vectorStore/controller';
+import { delFileByFileIdList } from '../../../common/file/gridfs/controller';
+import { BucketNameEnum } from '@fastgpt/global/common/file/constants';

 export async function createOneCollection({
   teamId,
@@ -85,20 +99,50 @@ export function createDefaultCollection({
   });
 }

-// check same collection
-export const getSameRawTextCollection = async ({
-  datasetId,
-  hashRawText
-}: {
-  datasetId: string;
-  hashRawText?: string;
-}) => {
-  if (!hashRawText) return undefined;
-
-  const collection = await MongoDatasetCollection.findOne({
-    datasetId,
-    hashRawText
-  });
-
-  return collection;
-};
+/**
+ * delete collection and it related data
+ */
+export async function delCollectionAndRelatedSources({
+  collections
+}: {
+  collections: (CollectionWithDatasetType | DatasetCollectionSchemaType)[];
+}) {
+  if (collections.length === 0) return;
+
+  const teamId = collections[0].teamId;
+
+  if (!teamId) return Promise.reject('teamId is not exist');
+
+  const collectionIds = collections.map((item) => String(item._id));
+  const fileIdList = collections.map((item) => item?.fileId || '').filter(Boolean);
+  const relatedImageIds = collections
+    .map((item) => item?.metadata?.relatedImgId || '')
+    .filter(Boolean);
+
+  // delete training data
+  await MongoDatasetTraining.deleteMany({
+    teamId,
+    collectionId: { $in: collectionIds }
+  });
+
+  await delay(2000);
+
+  // delete dataset.datas
+  await MongoDatasetData.deleteMany({ teamId, collectionId: { $in: collectionIds } });
+  // delete pg data
+  await deleteDatasetDataVector({ teamId, collectionIds });
+
+  // delete file and imgs
+  await Promise.all([
+    delImgByRelatedId(relatedImageIds),
+    delFileByFileIdList({
+      bucketName: BucketNameEnum.dataset,
+      fileIdList
+    })
+  ]);
+
+  // delete collections
+  await MongoDatasetCollection.deleteMany({
+    _id: { $in: collectionIds }
+  });
+}
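`delCollectionAndRelatedSources` deletes in dependency order (training queue, then data rows and vectors, then files and images, collections last), so a crash midway leaves a state that can simply be re-run rather than orphaned children. A hypothetical caller sketch — the `$or` query shape is my illustration; only the projected fields (`_id teamId fileId metadata`) are taken from this diff:

```ts
import { MongoDatasetCollection } from './schema';
import { delCollectionAndRelatedSources } from './controller';

// Hedged sketch: delete one collection plus its direct children, fetching
// only the fields delCollectionAndRelatedSources reads.
async function deleteCollectionTree(teamId: string, collectionId: string) {
  const collections = await MongoDatasetCollection.find(
    { teamId, $or: [{ _id: collectionId }, { parentId: collectionId }] },
    '_id teamId fileId metadata'
  ).lean();
  await delCollectionAndRelatedSources({ collections });
}
```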
@@ -1,7 +1,7 @@
 import { connectionMongo, type Model } from '../../../common/mongo';
 const { Schema, model, models } = connectionMongo;
 import { DatasetCollectionSchemaType } from '@fastgpt/global/core/dataset/type.d';
-import { TrainingTypeMap, DatasetCollectionTypeMap } from '@fastgpt/global/core/dataset/constant';
+import { TrainingTypeMap, DatasetCollectionTypeMap } from '@fastgpt/global/core/dataset/constants';
 import { DatasetCollectionName } from '../schema';
 import {
   TeamCollectionName,
@@ -91,11 +91,19 @@ const DatasetCollectionSchema = new Schema({
 });

 try {
   DatasetCollectionSchema.index({ teamId: 1 });
-  DatasetCollectionSchema.index({ datasetId: 1 });
-  DatasetCollectionSchema.index({ teamId: 1, datasetId: 1, parentId: 1 });
-  DatasetCollectionSchema.index({ updateTime: -1 });
-  DatasetCollectionSchema.index({ hashRawText: -1 });
+  // auth file
+  DatasetCollectionSchema.index({ teamId: 1, fileId: 1 }, { background: true });
+
+  // list collection; deep find collections
+  DatasetCollectionSchema.index(
+    {
+      teamId: 1,
+      datasetId: 1,
+      parentId: 1,
+      updateTime: -1
+    },
+    { background: true }
+  );
 } catch (error) {
   console.log(error);
 }
@@ -4,16 +4,32 @@ import type { ParentTreePathItemType } from '@fastgpt/global/common/parentFolder
 import { splitText2Chunks } from '@fastgpt/global/common/string/textSplitter';
 import { MongoDatasetTraining } from '../training/schema';
 import { urlsFetch } from '../../../common/string/cheerio';
-import { DatasetCollectionTypeEnum, TrainingModeEnum } from '@fastgpt/global/core/dataset/constant';
+import {
+  DatasetCollectionTypeEnum,
+  TrainingModeEnum
+} from '@fastgpt/global/core/dataset/constants';
 import { hashStr } from '@fastgpt/global/common/string/tools';

 /**
  * get all collection by top collectionId
  */
-export async function findCollectionAndChild(id: string, fields = '_id parentId name metadata') {
+export async function findCollectionAndChild({
+  teamId,
+  datasetId,
+  collectionId,
+  fields = '_id parentId name metadata'
+}: {
+  teamId: string;
+  datasetId: string;
+  collectionId: string;
+  fields?: string;
+}) {
   async function find(id: string) {
     // find children
-    const children = await MongoDatasetCollection.find({ parentId: id }, fields);
+    const children = await MongoDatasetCollection.find(
+      { teamId, datasetId, parentId: id },
+      fields
+    ).lean();

     let collections = children;

@@ -25,8 +41,8 @@ export async function findCollectionAndChild(id: string, fields = '_id parentId
     return collections;
   }
   const [collection, childCollections] = await Promise.all([
-    MongoDatasetCollection.findById(id, fields),
-    find(id)
+    MongoDatasetCollection.findById(collectionId, fields),
+    find(collectionId)
   ]);

   if (!collection) {
@@ -107,8 +123,8 @@ export const getCollectionAndRawText = async ({
   });

   return {
-    title: result[0].title,
-    rawText: result[0].content
+    title: result[0]?.title,
+    rawText: result[0]?.content
   };
 }

@@ -121,7 +137,7 @@ export const getCollectionAndRawText = async ({
   })();

   const hashRawText = hashStr(rawText);
-  const isSameRawText = col.hashRawText === hashRawText;
+  const isSameRawText = rawText && col.hashRawText === hashRawText;

   return {
     collection: col,
@@ -161,8 +177,7 @@ export const reloadCollectionChunks = async ({
   // split data
   const { chunks } = splitText2Chunks({
     text: newRawText,
-    chunkLen: col.chunkSize || 512,
-    countTokens: false
+    chunkLen: col.chunkSize || 512
   });

   // insert to training queue
|
||||
import { CollectionWithDatasetType } from '@fastgpt/global/core/dataset/type';
|
||||
import { CollectionWithDatasetType, DatasetSchemaType } from '@fastgpt/global/core/dataset/type';
|
||||
import { MongoDatasetCollection } from './collection/schema';
|
||||
import { MongoDataset } from './schema';
|
||||
import { delCollectionAndRelatedSources } from './collection/controller';
|
||||
|
||||
/* ============= dataset ========== */
|
||||
/* find all datasetId by top datasetId */
|
||||
export async function findDatasetIdTreeByTopDatasetId(
|
||||
id: string,
|
||||
result: string[] = []
|
||||
): Promise<string[]> {
|
||||
let allChildrenIds = [...result];
|
||||
export async function findDatasetAndAllChildren({
|
||||
teamId,
|
||||
datasetId,
|
||||
fields
|
||||
}: {
|
||||
teamId: string;
|
||||
datasetId: string;
|
||||
fields?: string;
|
||||
}): Promise<DatasetSchemaType[]> {
|
||||
const find = async (id: string) => {
|
||||
const children = await MongoDataset.find(
|
||||
{
|
||||
teamId,
|
||||
parentId: id
|
||||
},
|
||||
fields
|
||||
).lean();
|
||||
|
||||
// find children
|
||||
const children = await MongoDataset.find({ parentId: id });
|
||||
let datasets = children;
|
||||
|
||||
for (const child of children) {
|
||||
const grandChildrenIds = await findDatasetIdTreeByTopDatasetId(child._id, result);
|
||||
allChildrenIds = allChildrenIds.concat(grandChildrenIds);
|
||||
for (const child of children) {
|
||||
const grandChildrenIds = await find(child._id);
|
||||
datasets = datasets.concat(grandChildrenIds);
|
||||
}
|
||||
|
||||
return datasets;
|
||||
};
|
||||
const [dataset, childDatasets] = await Promise.all([
|
||||
MongoDataset.findById(datasetId),
|
||||
find(datasetId)
|
||||
]);
|
||||
|
||||
if (!dataset) {
|
||||
return Promise.reject('Dataset not found');
|
||||
}
|
||||
|
||||
return [String(id), ...allChildrenIds];
|
||||
return [dataset, ...childDatasets];
|
||||
}
|
||||
|
||||
export async function getCollectionWithDataset(collectionId: string) {
|
||||
@@ -30,3 +53,22 @@ export async function getCollectionWithDataset(collectionId: string) {
|
||||
}
|
||||
return data;
|
||||
}
|
||||
|
||||
/* delete all data by datasetIds */
|
||||
export async function delDatasetRelevantData({ datasets }: { datasets: DatasetSchemaType[] }) {
|
||||
if (!datasets.length) return;
|
||||
|
||||
const teamId = datasets[0].teamId;
|
||||
const datasetIds = datasets.map((item) => String(item._id));
|
||||
|
||||
// Get _id, teamId, fileId, metadata.relatedImgId for all collections
|
||||
const collections = await MongoDatasetCollection.find(
|
||||
{
|
||||
teamId,
|
||||
datasetId: { $in: datasetIds }
|
||||
},
|
||||
'_id teamId fileId metadata'
|
||||
).lean();
|
||||
|
||||
await delCollectionAndRelatedSources({ collections });
|
||||
}
|
||||
|
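`findDatasetAndAllChildren` now returns full documents (root first) instead of bare id strings, so `teamId` travels with the result and the new `delDatasetRelevantData` can scope every delete. A short sketch composing the two functions defined in this file's diff:

```ts
import { findDatasetAndAllChildren, delDatasetRelevantData } from './controller'; // same module per this diff

// Hedged sketch: recursively delete a dataset folder and everything under it.
async function deleteDatasetTree(teamId: string, datasetId: string) {
  const datasets = await findDatasetAndAllChildren({ teamId, datasetId });
  await delDatasetRelevantData({ datasets });
}
```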
@@ -1,87 +1,2 @@
 import { MongoDatasetData } from './schema';
 import { MongoDatasetTraining } from '../training/schema';
 import { delFileByFileIdList, delFileByMetadata } from '../../../common/file/gridfs/controller';
 import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
 import { MongoDatasetCollection } from '../collection/schema';
 import { delay } from '@fastgpt/global/common/system/utils';
 import { delImgByFileIdList } from '../../../common/file/image/controller';
 import { deleteDatasetDataVector } from '../../../common/vectorStore/controller';

-/* delete all data by datasetIds */
-export async function delDatasetRelevantData({ datasetIds }: { datasetIds: string[] }) {
-  datasetIds = datasetIds.map((item) => String(item));
-
-  // delete training data(There could be a training mission)
-  await MongoDatasetTraining.deleteMany({
-    datasetId: { $in: datasetIds }
-  });
-
-  await delay(2000);
-
-  // delete dataset.datas
-  await MongoDatasetData.deleteMany({ datasetId: { $in: datasetIds } });
-  // delete pg data
-  await deleteDatasetDataVector({ datasetIds });
-
-  // delete collections
-  await MongoDatasetCollection.deleteMany({
-    datasetId: { $in: datasetIds }
-  });
-
-  // delete related files
-  await Promise.all(
-    datasetIds.map((id) => delFileByMetadata({ bucketName: BucketNameEnum.dataset, datasetId: id }))
-  );
-}
-/**
- * delete all data by collectionIds
- */
-export async function delCollectionRelevantData({
-  collectionIds,
-  fileIds
-}: {
-  collectionIds: string[];
-  fileIds: string[];
-}) {
-  collectionIds = collectionIds.filter(Boolean).map((item) => String(item));
-  const filterFileIds = fileIds.filter(Boolean).map((item) => String(item));
-
-  // delete training data
-  await MongoDatasetTraining.deleteMany({
-    collectionId: { $in: collectionIds }
-  });
-
-  await delay(2000);
-
-  // delete dataset.datas
-  await MongoDatasetData.deleteMany({ collectionId: { $in: collectionIds } });
-  // delete pg data
-  await deleteDatasetDataVector({ collectionIds });
-
-  // delete collections
-  await MongoDatasetCollection.deleteMany({
-    _id: { $in: collectionIds }
-  });
-
-  // delete file and imgs
-  await Promise.all([
-    delImgByFileIdList(filterFileIds),
-    delFileByFileIdList({
-      bucketName: BucketNameEnum.dataset,
-      fileIdList: filterFileIds
-    })
-  ]);
-}
-/**
- * delete one data by mongoDataId
- */
-export async function delDatasetDataByDataId({
-  collectionId,
-  mongoDataId
-}: {
-  collectionId: string;
-  mongoDataId: string;
-}) {
-  await deleteDatasetDataVector({ collectionId, dataIds: [mongoDataId] });
-  await MongoDatasetData.findByIdAndDelete(mongoDataId);
-}
@@ -10,7 +10,7 @@ import { DatasetColCollectionName } from '../collection/schema';
 import {
   DatasetDataIndexTypeEnum,
   DatasetDataIndexTypeMap
-} from '@fastgpt/global/core/dataset/constant';
+} from '@fastgpt/global/core/dataset/constants';

 export const DatasetDataCollectionName = 'dataset.datas';

@@ -71,6 +71,7 @@ const DatasetDataSchema = new Schema({
     ],
     default: []
   },
+
   updateTime: {
     type: Date,
     default: () => new Date()
@@ -85,13 +86,18 @@ const DatasetDataSchema = new Schema({
 });

 try {
   DatasetDataSchema.index({ teamId: 1 });
-  DatasetDataSchema.index({ datasetId: 1 });
-  DatasetDataSchema.index({ collectionId: 1 });
-  DatasetDataSchema.index({ updateTime: -1 });
-  DatasetDataSchema.index({ collectionId: 1, q: 1, a: 1 });
+  // same data check
+  DatasetDataSchema.index({ teamId: 1, collectionId: 1, q: 1, a: 1 }, { background: true });
+  // list collection and count data; list data
+  DatasetDataSchema.index(
+    { teamId: 1, datasetId: 1, collectionId: 1, chunkIndex: 1, updateTime: -1 },
+    { background: true }
+  );
-  DatasetDataSchema.index({ datasetId: 1, fullTextToken: 'text' });
+  // full text index
+  DatasetDataSchema.index({ teamId: 1, datasetId: 1, fullTextToken: 'text' }, { background: true });
+  // Recall vectors after data matching
+  DatasetDataSchema.index({ teamId: 1, datasetId: 1, 'indexes.dataId': 1 }, { background: true });
+  DatasetDataSchema.index({ updateTime: 1 }, { background: true });
 } catch (error) {
   console.log(error);
 }
@@ -5,7 +5,7 @@ import {
   DatasetStatusEnum,
   DatasetStatusMap,
   DatasetTypeMap
-} from '@fastgpt/global/core/dataset/constant';
+} from '@fastgpt/global/core/dataset/constants';
 import {
   TeamCollectionName,
   TeamMemberCollectionName
@@ -2,7 +2,7 @@
 import { connectionMongo, type Model } from '../../../common/mongo';
 const { Schema, model, models } = connectionMongo;
 import { DatasetTrainingSchemaType } from '@fastgpt/global/core/dataset/type';
-import { DatasetDataIndexTypeMap, TrainingTypeMap } from '@fastgpt/global/core/dataset/constant';
+import { DatasetDataIndexTypeMap, TrainingTypeMap } from '@fastgpt/global/core/dataset/constants';
 import { DatasetColCollectionName } from '../collection/schema';
 import { DatasetCollectionName } from '../schema';
 import {
@@ -102,11 +102,11 @@ const TrainingDataSchema = new Schema({
 });

 try {
-  TrainingDataSchema.index({ teamId: 1 });
+  // lock training data; delete training data
+  TrainingDataSchema.index({ teamId: 1, collectionId: 1 });
+  // get training data and sort
   TrainingDataSchema.index({ weight: -1 });
   TrainingDataSchema.index({ lockTime: 1 });
-  TrainingDataSchema.index({ datasetId: 1 });
-  TrainingDataSchema.index({ collectionId: 1 });
   TrainingDataSchema.index({ expireAt: 1 }, { expireAfterSeconds: 7 * 24 * 60 });
 } catch (error) {
   console.log(error);
@@ -9,13 +9,11 @@
     "dayjs": "^1.11.7",
     "encoding": "^0.1.13",
     "jsonwebtoken": "^9.0.2",
-    "mammoth": "^1.6.0",
     "mongoose": "^7.0.2",
     "multer": "1.4.5-lts.1",
     "next": "13.5.2",
     "nextjs-cors": "^2.1.2",
     "node-cron": "^3.0.3",
-    "pdfjs-dist": "^4.0.269",
     "pg": "^8.10.0",
     "tunnel": "^0.0.6"
   },
@@ -6,7 +6,7 @@ import { TeamMemberRoleEnum } from '@fastgpt/global/support/user/team/constant';
 import { parseHeaderCert } from '../controller';
 import { PermissionTypeEnum } from '@fastgpt/global/support/permission/constant';
 import { AppErrEnum } from '@fastgpt/global/common/error/code/app';
-import { getTeamInfoByTmbId } from '../../user/team/controller';
+import { getTmbInfoByTmbId } from '../../user/team/controller';

 // model usage permission check
 export async function authApp({
@@ -24,7 +24,7 @@ export async function authApp({
 > {
   const result = await parseHeaderCert(props);
   const { teamId, tmbId } = result;
-  const { role } = await getTeamInfoByTmbId({ tmbId });
+  const { role } = await getTmbInfoByTmbId({ tmbId });

   const { app, isOwner, canWrite } = await (async () => {
     // get app
@@ -13,8 +13,9 @@ import {
 } from '@fastgpt/global/core/dataset/type';
 import { getFileById } from '../../../common/file/gridfs/controller';
 import { BucketNameEnum } from '@fastgpt/global/common/file/constants';
-import { getTeamInfoByTmbId } from '../../user/team/controller';
+import { getTmbInfoByTmbId } from '../../user/team/controller';
 import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
+import { MongoDatasetCollection } from '../../../core/dataset/collection/schema';

 export async function authDatasetByTmbId({
   teamId,
@@ -27,7 +28,7 @@ export async function authDatasetByTmbId({
   datasetId: string;
   per: AuthModeType['per'];
 }) {
-  const { role } = await getTeamInfoByTmbId({ tmbId });
+  const { role } = await getTmbInfoByTmbId({ tmbId });

   const { dataset, isOwner, canWrite } = await (async () => {
     const dataset = await MongoDataset.findOne({ _id: datasetId, teamId }).lean();
@@ -107,7 +108,7 @@ export async function authDatasetCollection({
   }
 > {
   const { userId, teamId, tmbId } = await parseHeaderCert(props);
-  const { role } = await getTeamInfoByTmbId({ tmbId });
+  const { role } = await getTmbInfoByTmbId({ tmbId });

   const { collection, isOwner, canWrite } = await (async () => {
     const collection = await getCollectionWithDataset(collectionId);
@@ -163,47 +164,40 @@ export async function authDatasetFile({
   }
 > {
   const { userId, teamId, tmbId } = await parseHeaderCert(props);
-  const { role } = await getTeamInfoByTmbId({ tmbId });

-  const file = await getFileById({ bucketName: BucketNameEnum.dataset, fileId });
+  const [file, collection] = await Promise.all([
+    getFileById({ bucketName: BucketNameEnum.dataset, fileId }),
+    MongoDatasetCollection.findOne({
+      teamId,
+      fileId
+    })
+  ]);

   if (!file) {
     return Promise.reject(CommonErrEnum.fileNotFound);
   }

-  if (file.metadata.teamId !== teamId) {
+  if (!collection) {
     return Promise.reject(DatasetErrEnum.unAuthDatasetFile);
   }

-  const { dataset } = await authDataset({
-    ...props,
-    datasetId: file.metadata.datasetId,
-    per
-  });
-  const isOwner =
-    role !== TeamMemberRoleEnum.visitor &&
-    (String(dataset.tmbId) === tmbId || role === TeamMemberRoleEnum.owner);
-
-  const canWrite =
-    isOwner ||
-    (role !== TeamMemberRoleEnum.visitor && dataset.permission === PermissionTypeEnum.public);
-
-  if (per === 'r' && !isOwner && dataset.permission !== PermissionTypeEnum.public) {
-    return Promise.reject(DatasetErrEnum.unAuthDatasetFile);
-  }
-  if (per === 'w' && !canWrite) {
-    return Promise.reject(DatasetErrEnum.unAuthDatasetFile);
-  }
-  if (per === 'owner' && !isOwner) {
-    return Promise.reject(DatasetErrEnum.unAuthDatasetFile);
-  }
-
-  return {
-    userId,
-    teamId,
-    tmbId,
-    file,
-    isOwner,
-    canWrite
-  };
+  // file role = collection role
+  try {
+    const { isOwner, canWrite } = await authDatasetCollection({
+      ...props,
+      collectionId: collection._id,
+      per
+    });
+
+    return {
+      userId,
+      teamId,
+      tmbId,
+      file,
+      isOwner,
+      canWrite
+    };
+  } catch (error) {
+    return Promise.reject(DatasetErrEnum.unAuthDatasetFile);
+  }
 }
@@ -2,7 +2,7 @@ import { AuthResponseType } from '@fastgpt/global/support/permission/type';
 import { AuthModeType } from '../type';
 import { OpenApiSchema } from '@fastgpt/global/support/openapi/type';
 import { parseHeaderCert } from '../controller';
-import { getTeamInfoByTmbId } from '../../user/team/controller';
+import { getTmbInfoByTmbId } from '../../user/team/controller';
 import { MongoOpenApi } from '../../openapi/schema';
 import { OpenApiErrEnum } from '@fastgpt/global/common/error/code/openapi';
 import { TeamMemberRoleEnum } from '@fastgpt/global/support/user/team/constant';
@@ -21,7 +21,7 @@ export async function authOpenApiKeyCrud({
   const result = await parseHeaderCert(props);
   const { tmbId, teamId } = result;

-  const { role } = await getTeamInfoByTmbId({ tmbId });
+  const { role } = await getTmbInfoByTmbId({ tmbId });

   const { openapi, isOwner, canWrite } = await (async () => {
     const openapi = await MongoOpenApi.findOne({ _id: id, teamId });
@@ -9,7 +9,7 @@ import { MongoApp } from '../../../core/app/schema';
 import { OutLinkErrEnum } from '@fastgpt/global/common/error/code/outLink';
 import { PermissionTypeEnum } from '@fastgpt/global/support/permission/constant';
 import { AppErrEnum } from '@fastgpt/global/common/error/code/app';
-import { getTeamInfoByTmbId } from '../../user/team/controller';
+import { getTmbInfoByTmbId } from '../../user/team/controller';

 /* crud outlink permission */
 export async function authOutLinkCrud({
@@ -27,7 +27,7 @@ export async function authOutLinkCrud({
   const result = await parseHeaderCert(props);
   const { tmbId, teamId } = result;

-  const { role } = await getTeamInfoByTmbId({ tmbId });
+  const { role } = await getTmbInfoByTmbId({ tmbId });

   const { app, outLink, isOwner, canWrite } = await (async () => {
     const outLink = await MongoOutLink.findOne({ _id: outLinkId, teamId });
@@ -1,7 +1,7 @@
 import { AuthResponseType } from '@fastgpt/global/support/permission/type';
 import { AuthModeType } from '../type';
 import { parseHeaderCert } from '../controller';
-import { getTeamInfoByTmbId } from '../../user/team/controller';
+import { getTmbInfoByTmbId } from '../../user/team/controller';
 import { TeamMemberRoleEnum } from '@fastgpt/global/support/user/team/constant';
 import { MongoPlugin } from '../../../core/plugin/schema';
 import { PluginErrEnum } from '@fastgpt/global/common/error/code/plugin';
@@ -23,7 +23,7 @@ export async function authPluginCrud({
   const result = await parseHeaderCert(props);
   const { tmbId, teamId } = result;

-  const { role } = await getTeamInfoByTmbId({ tmbId });
+  const { role } = await getTmbInfoByTmbId({ tmbId });

   const { plugin, isOwner, canWrite } = await (async () => {
     const plugin = await MongoPlugin.findOne({ _id: id, teamId });
@@ -73,7 +73,7 @@ export async function authPluginCanUse({
   }

   if (source === PluginSourceEnum.personal) {
-    const { role } = await getTeamInfoByTmbId({ tmbId });
+    const { role } = await getTmbInfoByTmbId({ tmbId });
     const plugin = await MongoPlugin.findOne({ _id: pluginId, teamId });
     if (!plugin) {
       return Promise.reject(PluginErrEnum.unExist);
@@ -3,7 +3,7 @@ import { AuthModeType } from '../type';
 import { TeamItemType } from '@fastgpt/global/support/user/team/type';
 import { TeamMemberRoleEnum } from '@fastgpt/global/support/user/team/constant';
 import { parseHeaderCert } from '../controller';
-import { getTeamInfoByTmbId } from '../../user/team/controller';
+import { getTmbInfoByTmbId } from '../../user/team/controller';
 import { UserErrEnum } from '../../../../global/common/error/code/user';

 export async function authUserNotVisitor(props: AuthModeType): Promise<
@@ -13,7 +13,7 @@ export async function authUserNotVisitor(props: AuthModeType): Promise<
   }
 > {
   const { userId, teamId, tmbId } = await parseHeaderCert(props);
-  const team = await getTeamInfoByTmbId({ tmbId });
+  const team = await getTmbInfoByTmbId({ tmbId });

   if (team.role === TeamMemberRoleEnum.visitor) {
     return Promise.reject(UserErrEnum.binVisitor);
@@ -38,7 +38,7 @@ export async function authUserRole(props: AuthModeType): Promise<
   }
 > {
   const result = await parseHeaderCert(props);
-  const { role: userRole, canWrite } = await getTeamInfoByTmbId({ tmbId: result.tmbId });
+  const { role: userRole, canWrite } = await getTmbInfoByTmbId({ tmbId: result.tmbId });

   return {
     ...result,
@@ -14,7 +14,7 @@ export const checkDatasetLimit = async ({
   const usedSize = await getVectorCountByTeamId(teamId);

   if (usedSize + insertLen >= maxSize) {
-    return Promise.reject(`数据库容量已满,无法继续添加。可以在账号页面进行扩容。`);
+    return Promise.reject(`数据库容量不足,无法继续添加。可以在账号页面进行扩容。`);
   }
   return;
 };
@@ -1,6 +1,6 @@
 import { UserType } from '@fastgpt/global/support/user/type';
 import { MongoUser } from './schema';
-import { getTeamInfoByTmbId, getUserDefaultTeam } from './team/controller';
+import { getTmbInfoByTmbId, getUserDefaultTeam } from './team/controller';
 import { ERROR_ENUM } from '@fastgpt/global/common/error/errorCode';
 import { UserErrEnum } from '@fastgpt/global/common/error/code/user';

@@ -21,16 +21,16 @@ export async function getUserDetail({
   tmbId?: string;
   userId?: string;
 }): Promise<UserType> {
-  const team = await (async () => {
+  const tmb = await (async () => {
     if (tmbId) {
-      return getTeamInfoByTmbId({ tmbId });
+      return getTmbInfoByTmbId({ tmbId });
     }
     if (userId) {
       return getUserDefaultTeam({ userId });
     }
     return Promise.reject(ERROR_ENUM.unAuthorization);
   })();
-  const user = await MongoUser.findById(team.userId);
+  const user = await MongoUser.findById(tmb.userId);

   if (!user) {
     return Promise.reject(ERROR_ENUM.unAuthorization);
@@ -44,7 +44,7 @@ export async function getUserDetail({
     timezone: user.timezone,
     promotionRate: user.promotionRate,
     openaiAccount: user.openaiAccount,
-    team
+    team: tmb
   };
 }
@@ -56,9 +56,18 @@ const UserSchema = new Schema({
   timezone: {
     type: String,
     default: 'Asia/Shanghai'
   },
+  lastLoginTmbId: {
+    type: Schema.Types.ObjectId
+  }
 });

+try {
+  UserSchema.index({ createTime: -1 });
+} catch (error) {
+  console.log(error);
+}
+
 export const MongoUser: Model<UserModelSchema> =
   models[userCollectionName] || model(userCollectionName, UserSchema);
+MongoUser.syncIndexes();
@@ -8,7 +8,7 @@ import {
 import { MongoTeamMember } from './teamMemberSchema';
 import { MongoTeam } from './teamSchema';

-async function getTeam(match: Record<string, any>): Promise<TeamItemType> {
+async function getTeamMember(match: Record<string, any>): Promise<TeamItemType> {
   const tmb = (await MongoTeamMember.findOne(match).populate('teamId')) as TeamMemberWithTeamSchema;

   if (!tmb) {
@@ -31,11 +31,11 @@ async function getTeam(match: Record<string, any>): Promise<TeamItemType> {
   };
 }

-export async function getTeamInfoByTmbId({ tmbId }: { tmbId: string }) {
+export async function getTmbInfoByTmbId({ tmbId }: { tmbId: string }) {
   if (!tmbId) {
     return Promise.reject('tmbId or userId is required');
   }
-  return getTeam({
+  return getTeamMember({
     _id: new Types.ObjectId(tmbId),
     status: notLeaveStatus
   });
@@ -45,7 +45,7 @@ export async function getUserDefaultTeam({ userId }: { userId: string }) {
   if (!userId) {
     return Promise.reject('tmbId or userId is required');
   }
-  return getTeam({
+  return getTeamMember({
     userId: new Types.ObjectId(userId),
     defaultTeam: true
   });
@@ -24,11 +24,11 @@ const TeamSchema = new Schema({
   },
   balance: {
     type: Number,
-    default: 2 * PRICE_SCALE
+    default: 0
   },
   maxSize: {
     type: Number,
-    default: 5
+    default: 3
   },
   limit: {
     lastExportDatasetTime: {
@@ -41,7 +41,7 @@ const TeamSchema = new Schema({
 });

 try {
-  TeamSchema.index({ lastDatasetBillTime: -1 });
+  // TeamSchema.index({ createTime: -1 });
 } catch (error) {
   console.log(error);
 }
@@ -25,15 +25,13 @@ export const createTrainingBill = async ({
     {
       moduleName: 'wallet.moduleName.index',
       model: vectorModel,
-      inputTokens: 0,
-      outputTokens: 0,
+      charsLength: 0,
       amount: 0
     },
    {
       moduleName: 'wallet.moduleName.qa',
       model: agentModel,
-      inputTokens: 0,
-      outputTokens: 0,
+      charsLength: 0,
       amount: 0
     }
   ],
@@ -52,10 +52,8 @@ const BillSchema = new Schema({
 });

 try {
-  BillSchema.index({ teamId: 1 });
-  BillSchema.index({ tmbId: 1 });
-  BillSchema.index({ tmbId: 1, time: 1 });
-  BillSchema.index({ time: 1 }, { expireAfterSeconds: 90 * 24 * 60 * 60 });
+  BillSchema.index({ teamId: 1, tmbId: 1, time: -1 });
+  BillSchema.index({ time: 1 }, { expireAfterSeconds: 180 * 24 * 60 * 60 });
 } catch (error) {
   console.log(error);
 }