V4.8.18 feature (#3565)

* feat: org CRUD (#3380)

* feat: add org schema

* feat: org manage UI

* feat: OrgInfoModal

* feat: org tree view

* feat: org management

* fix: init root org

* feat: org permission for app

* feat: org support for dataset

* fix: disable org role control

* styles: opt type signatures

* fix: remove unused permission

* feat: delete org collaborator

* perf: Team org ui (#3499)

* perf: org ui

* perf: org ui

* feat: org auth for app & dataset (#3498)

* feat: auth org resource permission

* feat: org auth support for app & dataset

* perf: org permission check (#3500)

* i18n (#3501)

* name

* i18n

* feat: support dataset changeOwner (#3483)

* feat: support dataset changeOwner

* chore: update dataset change owner api

* feat: permission manage UI for org (#3503)

* perf: password check;perf: image upload check;perf: sso login check (#3509)

* perf: password check

* perf: image upload check

* perf: sso login check

* force show update notification modal & fix login page text (#3512)

* fix login page English text

* update notification modal

* perf: notify account (#3515)

* perf(plugin): improve searXNG empty result handling and documentation (#3507)

* perf(plugin): improve searXNG empty result handling and documentation

* 修改了文档和代码部分无搜索的结果的反馈

* refactor: org pathId (#3516)

* optimize payment process (#3517)

* feat: support wecom sso (#3518)

* feat: support wecom sso

* chore: remove unused wecom js-sdk dependency

* fix qrcode script (#3520)

* fix qrcode script

* i18n

* perf: full text collection and search code;perf: rename function (#3519)

* perf: full text collection and search code

* perf: rename function

* perf: notify modal

* remove invalid code

* perf: sso login

* perf: pay process

* 4.8.18 test (#3524)

* perf: remove local token

* perf: index

* perf: file encoding;perf: leave team code;@c121914yu perf: full text search code (#3528)

* perf: text encoding

* perf: leave team code

* perf: full text search code

* fix: http status

* perf: embedding search and vector avatar

* perf: async read file (#3531)

* refactor: team permission manager (#3535)

* perf: classify org, group and member

* refactor: team permission manager

* fix: missing functions

* 4.8.18 test (#3543)

* perf: login check

* doc

* perf: llm model config

* perf: team clb config

* fix: MemberModal UI (#3553)

* fix: adapt MemberModal title and icon

* fix: adapt member modal

* fix: search input placeholder

* fix: add button text

* perf: org permission (#3556)

* docs:用户答疑的官方文档补充 (#3540)

* docs:用户答疑的官方文档补充

* 问题回答的内容修补

* share link random avatar (#3541)

* share link random avatar

* fix

* delete unused code

* share page avatar (#3558)

* feat: init 4818

* share page avatar

* feat: tmp upgrade code (#3559)

* feat: tmp upgrade code

* fulltext search test

* update action

* full text tmp code (#3561)

* full text tmp code

* fix: init

* fix: init

* remove tmp code

* remove tmp code

* 4818-alpha

* 4.8.18 test (#3562)

* full text tmp code

* fix: init

* upgrade code

* account log

* account log

* perf: dockerfile

* upgrade code

* chore: update docs app template submission (#3564)

---------

Co-authored-by: a.e. <49438478+I-Info@users.noreply.github.com>
Co-authored-by: Finley Ge <32237950+FinleyGe@users.noreply.github.com>
Co-authored-by: heheer <heheer@sealos.io>
Co-authored-by: Jiangween <145003935+Jiangween@users.noreply.github.com>
This commit is contained in:
Archer
2025-01-11 15:15:38 +08:00
committed by GitHub
parent bb669ca3ff
commit 10d8c56e23
205 changed files with 5305 additions and 2428 deletions

View File

@@ -4,7 +4,7 @@ import fsp from 'fs/promises';
import fs from 'fs';
import { DatasetFileSchema } from '@fastgpt/global/core/dataset/type';
import { MongoChatFileSchema, MongoDatasetFileSchema } from './schema';
import { detectFileEncoding } from '@fastgpt/global/common/file/tools';
import { detectFileEncoding, detectFileEncodingByPath } from '@fastgpt/global/common/file/tools';
import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
import { MongoRawTextBuffer } from '../../buffer/rawText/schema';
import { readRawContentByFileBuffer } from '../read/utils';
@@ -36,7 +36,6 @@ export async function uploadFile({
path,
filename,
contentType,
encoding,
metadata = {}
}: {
bucketName: `${BucketNameEnum}`;
@@ -45,7 +44,6 @@ export async function uploadFile({
path: string;
filename: string;
contentType?: string;
encoding: string;
metadata?: Record<string, any>;
}) {
if (!path) return Promise.reject(`filePath is empty`);
@@ -59,7 +57,7 @@ export async function uploadFile({
// Add default metadata
metadata.teamId = teamId;
metadata.uid = uid;
metadata.encoding = encoding;
metadata.encoding = await detectFileEncodingByPath(path);
// create a gridfs bucket
const bucket = getGridBucket(bucketName);

View File

@@ -1,43 +1,92 @@
import { UploadImgProps } from '@fastgpt/global/common/file/api';
import { imageBaseUrl } from '@fastgpt/global/common/file/image/constants';
import { MongoImage } from './schema';
import { ClientSession } from '../../../common/mongo';
import { ClientSession, Types } from '../../../common/mongo';
import { guessBase64ImageType } from '../utils';
import { readFromSecondary } from '../../mongo/utils';
import { addHours } from 'date-fns';
export const maxImgSize = 1024 * 1024 * 12;
const base64MimeRegex = /data:image\/([^\)]+);base64/;
export async function uploadMongoImg({
type,
base64Img,
teamId,
expiredTime,
metadata,
shareId
shareId,
forever = false
}: UploadImgProps & {
teamId: string;
forever?: Boolean;
}) {
if (base64Img.length > maxImgSize) {
return Promise.reject('Image too large');
}
const [base64Mime, base64Data] = base64Img.split(',');
// Check if mime type is valid
if (!base64MimeRegex.test(base64Mime)) {
return Promise.reject('Invalid image mime type');
}
const mime = `image/${base64Mime.match(base64MimeRegex)?.[1] ?? 'image/jpeg'}`;
const binary = Buffer.from(base64Data, 'base64');
const extension = mime.split('/')[1];
const { _id } = await MongoImage.create({
type,
teamId,
binary,
expiredTime,
metadata: Object.assign({ mime }, metadata),
shareId
shareId,
expiredTime: forever ? undefined : addHours(new Date(), 1)
});
return `${process.env.FE_DOMAIN || ''}${process.env.NEXT_PUBLIC_BASE_URL || ''}${imageBaseUrl}${String(_id)}.${extension}`;
}
const getIdFromPath = (path?: string) => {
if (!path) return;
const paths = path.split('/');
const name = paths[paths.length - 1];
if (!name) return;
const id = name.split('.')[0];
if (!id || !Types.ObjectId.isValid(id)) return;
return id;
};
// 删除旧的头像,新的头像去除过期时间
export const refreshSourceAvatar = async (
path?: string,
oldPath?: string,
session?: ClientSession
) => {
const newId = getIdFromPath(path);
const oldId = getIdFromPath(oldPath);
if (!newId) return;
await MongoImage.updateOne({ _id: newId }, { $unset: { expiredTime: 1 } }, { session });
if (oldId) {
await MongoImage.deleteOne({ _id: oldId }, { session });
}
};
export const removeImageByPath = (path?: string, session?: ClientSession) => {
if (!path) return;
const paths = path.split('/');
const name = paths[paths.length - 1];
if (!name) return;
const id = name.split('.')[0];
if (!id || !Types.ObjectId.isValid(id)) return;
return MongoImage.deleteOne({ _id: id }, { session });
};
export async function readMongoImg({ id }: { id: string }) {
const formatId = id.replace(/\.[^/.]+$/, '');

View File

@@ -1,8 +1,7 @@
import { TeamCollectionName } from '@fastgpt/global/support/user/team/constant';
import { connectionMongo, getMongoModel, type Model } from '../../mongo';
import { connectionMongo, getMongoModel } from '../../mongo';
import { MongoImageSchemaType } from '@fastgpt/global/common/file/image/type.d';
import { mongoImageTypeMap } from '@fastgpt/global/common/file/image/constants';
const { Schema, model, models } = connectionMongo;
const { Schema } = connectionMongo;
const ImageSchema = new Schema({
teamId: {
@@ -14,27 +13,15 @@ const ImageSchema = new Schema({
type: Date,
default: () => new Date()
},
expiredTime: {
type: Date
},
binary: {
type: Buffer
},
type: {
type: String,
enum: Object.keys(mongoImageTypeMap),
required: true
},
metadata: {
type: Object
}
expiredTime: Date,
binary: Buffer,
metadata: Object
});
try {
// tts expired60 Minutes
ImageSchema.index({ expiredTime: 1 }, { expireAfterSeconds: 60 * 60 });
ImageSchema.index({ type: 1 });
ImageSchema.index({ createTime: 1 });
// delete related img
ImageSchema.index({ teamId: 1, 'metadata.relatedId': 1 });
} catch (error) {

View File

@@ -1,5 +1,4 @@
import { uploadMongoImg } from '../image/controller';
import { MongoImageTypeEnum } from '@fastgpt/global/common/file/image/constants';
import FormData from 'form-data';
import { WorkerNameEnum, runWorker } from '../../../worker/utils';
@@ -8,7 +7,6 @@ import type { ReadFileResponse } from '../../../worker/readFile/type';
import axios from 'axios';
import { addLog } from '../../system/log';
import { batchRun } from '@fastgpt/global/common/fn/utils';
import { addHours } from 'date-fns';
import { matchMdImgTextAndUpload } from '@fastgpt/global/common/string/markdown';
export type readRawTextByLocalFileParams = {
@@ -22,7 +20,7 @@ export const readRawTextByLocalFile = async (params: readRawTextByLocalFileParam
const extension = path?.split('.')?.pop()?.toLowerCase() || '';
const buffer = fs.readFileSync(path);
const buffer = await fs.promises.readFile(path);
const { rawText } = await readRawContentByFileBuffer({
extension,
@@ -114,10 +112,9 @@ export const readRawContentByFileBuffer = async ({
if (imageList) {
await batchRun(imageList, async (item) => {
const src = await uploadMongoImg({
type: MongoImageTypeEnum.collectionImage,
base64Img: `data:${item.mime};base64,${item.base64}`,
teamId,
expiredTime: addHours(new Date(), 1),
// expiredTime: addHours(new Date(), 1),
metadata: {
...metadata,
mime: item.mime

View File

@@ -9,10 +9,10 @@ import { jsonRes } from '../response';
// unit: times/s
// how to use?
// export default NextAPI(useQPSLimit(10), handler); // limit 10 times per second for a ip
export function useReqFrequencyLimit(seconds: number, limit: number) {
export function useReqFrequencyLimit(seconds: number, limit: number, force = false) {
return async (req: ApiRequestProps, res: NextApiResponse) => {
const ip = requestIp.getClientIp(req);
if (!ip || process.env.USE_IP_LIMIT !== 'true') {
if (!ip || (process.env.USE_IP_LIMIT !== 'true' && !force)) {
return;
}
try {
@@ -22,10 +22,9 @@ export function useReqFrequencyLimit(seconds: number, limit: number) {
expiredTime: addSeconds(new Date(), seconds)
});
} catch (_) {
res.status(429);
jsonRes(res, {
code: 429,
message: ERROR_ENUM.tooManyRequest
error: ERROR_ENUM.tooManyRequest
});
}
};

View File

@@ -33,7 +33,7 @@ export const jsonRes = <T = any>(
addLog.error(`Api response error: ${url}`, ERROR_RESPONSE[errResponseKey]);
return res.json(ERROR_RESPONSE[errResponseKey]);
return res.status(code).json(ERROR_RESPONSE[errResponseKey]);
}
// another error

View File

@@ -0,0 +1,11 @@
{
"provider": "OpenAI",
"model": "text-embedding-ada-002",
"name": "text-embedding-ada-002",
"defaultToken": 512, // 默认分块 token
"maxToken": 3000, // 最大分块 token
"weight": 0, // 权重
"charsPointsPrice": 0 // 积分/1k token
}

View File

@@ -0,0 +1,33 @@
{
"provider": "OpenAI",
"model": "gpt-4o-mini",
"name": "GPT-4o-mini", // alias
"maxContext": 125000, // 最大上下文
"maxResponse": 16000, // 最大回复
"quoteMaxToken": 60000, // 最大引用
"maxTemperature": 1.2, // 最大温度
"presencePenaltyRange": [-2, 2], // 惩罚系数范围
"frequencyPenaltyRange": [-2, 2], // 频率惩罚系数范围
"responseFormatList": ["text", "json_object", "json_schema"], // 响应格式
"showStopSign": true, // 是否显示停止符号
"vision": true, // 是否支持图片识别
"toolChoice": true, // 是否支持工具调用
"functionCall": false, // 是否支持函数调用(一般都可以 false 了,基本不用了)
"defaultSystemChatPrompt": "", // 默认系统提示
"datasetProcess": true, // 用于知识库文本处理
"usedInClassify": true, // 用于问题分类
"customCQPrompt": "", // 自定义问题分类提示
"usedInExtractFields": true, // 用于提取字段
"customExtractPrompt": "", // 自定义提取提示
"usedInToolCall": true, // 用于工具调用
"usedInQueryExtension": true, // 用于问题优化
"defaultConfig": {}, // 额外的自定义 body
"fieldMap": {}, // body 字段映射
"censor": false, // 是否开启敏感词过滤
"charsPointsPrice": 0 // n 积分/1k token
}

View File

@@ -0,0 +1,6 @@
{
"provider": "BAAI",
"model": "bge-reranker-v2-m3",
"name": "bge-reranker-v2-m3",
"charsPointsPrice": 0
}

View File

@@ -0,0 +1,6 @@
{
"provider": "OpenAI",
"model": "whisper-1",
"name": "whisper-1",
"charsPointsPrice": 0
}

View File

@@ -0,0 +1,32 @@
{
"provider": "OpenAI",
"model": "tts-1",
"name": "TTS1",
"charsPointsPrice": 0,
"voices": [
{
"label": "Alloy",
"value": "alloy"
},
{
"label": "Echo",
"value": "echo"
},
{
"label": "Fable",
"value": "fable"
},
{
"label": "Onyx",
"value": "onyx"
},
{
"label": "Nova",
"value": "nova"
},
{
"label": "Shimmer",
"value": "shimmer"
}
]
}

View File

@@ -5,11 +5,11 @@ import { PluginSourceEnum } from '@fastgpt/global/core/plugin/constants';
/*
Plugin points calculation:
1. 商业版插件:
1. 系统插件/商业版插件:
- 有错误:返回 0
- 无错误:返回 配置的点数 + 子节点点数
2. 其他插件
- 返回 子节点点数
- 无错误:返回 单次积分 + 子流程积分(可配置)
2. 个人插件
- 返回 子流程积分
*/
export const computedPluginUsage = async ({
plugin,
@@ -26,9 +26,9 @@ export const computedPluginUsage = async ({
if (source !== PluginSourceEnum.personal) {
if (error) return 0;
const pluginCurrentCose = plugin.currentCost ?? 0;
const pluginCurrentCost = plugin.currentCost ?? 0;
return plugin.hasTokenFee ? pluginCurrentCose + childrenUsages : pluginCurrentCose;
return plugin.hasTokenFee ? pluginCurrentCost + childrenUsages : pluginCurrentCost;
}
return childrenUsages;

View File

@@ -86,24 +86,21 @@ const ChatItemSchema = new Schema({
});
try {
ChatItemSchema.index({ dataId: 1 }, { background: true });
ChatItemSchema.index({ dataId: 1 });
/* delete by app;
delete by chat id;
get chat list;
get chat logs;
close custom feedback;
*/
ChatItemSchema.index({ appId: 1, chatId: 1, dataId: 1 }, { background: true });
ChatItemSchema.index({ appId: 1, chatId: 1, dataId: 1 });
// admin charts
ChatItemSchema.index({ time: -1, obj: 1 }, { background: true });
ChatItemSchema.index({ time: -1, obj: 1 });
// timer, clear history
ChatItemSchema.index({ teamId: 1, time: -1 }, { background: true });
ChatItemSchema.index({ teamId: 1, time: -1 });
// Admin charts
ChatItemSchema.index(
{ obj: 1, time: -1 },
{ background: true, partialFilterExpression: { obj: 'Human' } }
);
ChatItemSchema.index({ obj: 1, time: -1 }, { partialFilterExpression: { obj: 'Human' } });
} catch (error) {
console.log(error);
}

View File

@@ -81,19 +81,19 @@ const ChatSchema = new Schema({
});
try {
ChatSchema.index({ chatId: 1 }, { background: true });
ChatSchema.index({ chatId: 1 });
// get user history
ChatSchema.index({ tmbId: 1, appId: 1, top: -1, updateTime: -1 }, { background: true });
ChatSchema.index({ tmbId: 1, appId: 1, top: -1, updateTime: -1 });
// delete by appid; clear history; init chat; update chat; auth chat; get chat;
ChatSchema.index({ appId: 1, chatId: 1 }, { background: true });
ChatSchema.index({ appId: 1, chatId: 1 });
// get chat logs;
ChatSchema.index({ teamId: 1, appId: 1, updateTime: -1 }, { background: true });
ChatSchema.index({ teamId: 1, appId: 1, updateTime: -1 });
// get share chat history
ChatSchema.index({ shareId: 1, outLinkUid: 1, updateTime: -1 }, { background: true });
ChatSchema.index({ shareId: 1, outLinkUid: 1, updateTime: -1 });
// timer, clear history
ChatSchema.index({ teamId: 1, updateTime: -1 }, { background: true });
ChatSchema.index({ teamId: 1, updateTime: -1 });
} catch (error) {
console.log(error);
}

View File

@@ -24,6 +24,7 @@ import { pushDataListToTrainingQueue } from '../training/controller';
import { MongoImage } from '../../../common/file/image/schema';
import { hashStr } from '@fastgpt/global/common/string/tools';
import { addDays } from 'date-fns';
import { MongoDatasetDataText } from '../data/dataTextSchema';
export const createCollectionAndInsertData = async ({
dataset,
@@ -240,12 +241,12 @@ export const delCollectionRelatedSource = async ({
.map((item) => item?.metadata?.relatedImgId || '')
.filter(Boolean);
// delete files
// Delete files
await delFileByFileIdList({
bucketName: BucketNameEnum.dataset,
fileIdList
});
// delete images
// Delete images
await delImgByRelatedId({
teamId,
relateIds: relatedImageIds,
@@ -273,7 +274,7 @@ export async function delCollection({
const datasetIds = Array.from(new Set(collections.map((item) => String(item.datasetId))));
const collectionIds = collections.map((item) => String(item._id));
// delete training data
// Delete training data
await MongoDatasetTraining.deleteMany({
teamId,
datasetIds: { $in: datasetIds },
@@ -285,11 +286,16 @@ export async function delCollection({
await delCollectionRelatedSource({ collections, session });
}
// delete dataset.datas
// Delete dataset_datas
await MongoDatasetData.deleteMany(
{ teamId, datasetIds: { $in: datasetIds }, collectionId: { $in: collectionIds } },
{ session }
);
// Delete dataset_data_texts
await MongoDatasetDataText.deleteMany(
{ teamId, datasetIds: { $in: datasetIds }, collectionId: { $in: collectionIds } },
{ session }
);
// delete collections
await MongoDatasetCollection.deleteMany(

View File

@@ -6,6 +6,7 @@ import { ClientSession } from '../../common/mongo';
import { MongoDatasetTraining } from './training/schema';
import { MongoDatasetData } from './data/schema';
import { deleteDatasetDataVector } from '../../common/vectorStore/controller';
import { MongoDatasetDataText } from './data/dataTextSchema';
/* ============= dataset ========== */
/* find all datasetId by top datasetId */
@@ -92,7 +93,7 @@ export async function delDatasetRelevantData({
{ session }
).lean();
// image and file
// Delete Image and file
await delCollectionRelatedSource({ collections, session });
// delete collections
@@ -101,9 +102,15 @@ export async function delDatasetRelevantData({
datasetId: { $in: datasetIds }
}).session(session);
// delete dataset.datas(Not need session)
// No session delete:
// Delete dataset_data_texts
await MongoDatasetDataText.deleteMany({
teamId,
datasetId: { $in: datasetIds }
});
// delete dataset_datas
await MongoDatasetData.deleteMany({ teamId, datasetId: { $in: datasetIds } });
// no session delete: delete files, vector data
// Delete vector data
await deleteDatasetDataVector({ teamId, datasetIds });
}

View File

@@ -0,0 +1,45 @@
import { connectionMongo, getMongoModel } from '../../../common/mongo';
const { Schema } = connectionMongo;
import { DatasetDataSchemaType } from '@fastgpt/global/core/dataset/type.d';
import { TeamCollectionName } from '@fastgpt/global/support/user/team/constant';
import { DatasetCollectionName } from '../schema';
import { DatasetColCollectionName } from '../collection/schema';
import { DatasetDataCollectionName } from './schema';
export const DatasetDataTextCollectionName = 'dataset_data_texts';
const DatasetDataTextSchema = new Schema({
teamId: {
type: Schema.Types.ObjectId,
ref: TeamCollectionName,
required: true
},
datasetId: {
type: Schema.Types.ObjectId,
ref: DatasetCollectionName,
required: true
},
collectionId: {
type: Schema.Types.ObjectId,
ref: DatasetColCollectionName,
required: true
},
dataId: {
type: Schema.Types.ObjectId,
ref: DatasetDataCollectionName,
required: true
},
fullTextToken: String
});
try {
DatasetDataTextSchema.index({ teamId: 1, datasetId: 1, fullTextToken: 'text' });
DatasetDataTextSchema.index({ dataId: 1 }, { unique: true });
} catch (error) {
console.log(error);
}
export const MongoDatasetDataText = getMongoModel<DatasetDataSchemaType>(
DatasetDataTextCollectionName,
DatasetDataTextSchema
);

View File

@@ -1,4 +1,4 @@
import { connectionMongo, getMongoModel, type Model } from '../../../common/mongo';
import { connectionMongo, getMongoModel } from '../../../common/mongo';
const { Schema, model, models } = connectionMongo;
import { DatasetDataSchemaType } from '@fastgpt/global/core/dataset/type.d';
import {
@@ -39,10 +39,6 @@ const DatasetDataSchema = new Schema({
type: String,
default: ''
},
fullTextToken: {
type: String,
default: ''
},
indexes: {
type: [
{
@@ -71,17 +67,11 @@ const DatasetDataSchema = new Schema({
type: Number,
default: 0
},
inited: {
type: Boolean
},
rebuilding: Boolean
});
rebuilding: Boolean,
DatasetDataSchema.virtual('collection', {
ref: DatasetColCollectionName,
localField: 'collectionId',
foreignField: '_id',
justOne: true
// Abandon
fullTextToken: String,
initFullText: Boolean
});
try {
@@ -93,13 +83,15 @@ try {
chunkIndex: 1,
updateTime: -1
});
// full text index
DatasetDataSchema.index({ teamId: 1, datasetId: 1, fullTextToken: 'text' });
// FullText tmp full text index
// DatasetDataSchema.index({ teamId: 1, datasetId: 1, fullTextToken: 'text' });
// Recall vectors after data matching
DatasetDataSchema.index({ teamId: 1, datasetId: 1, collectionId: 1, 'indexes.dataId': 1 });
DatasetDataSchema.index({ updateTime: 1 });
// rebuild data
DatasetDataSchema.index({ rebuilding: 1, teamId: 1, datasetId: 1 });
DatasetDataSchema.index({ initFullText: 1 });
} catch (error) {
console.log(error);
}

View File

@@ -8,8 +8,8 @@ import { getVectorsByText } from '../../ai/embedding';
import { getVectorModel } from '../../ai/model';
import { MongoDatasetData } from '../data/schema';
import {
DatasetCollectionSchemaType,
DatasetDataSchemaType,
DatasetDataTextSchemaType,
SearchDataResponseItemType
} from '@fastgpt/global/core/dataset/type';
import { MongoDatasetCollection } from '../collection/schema';
@@ -23,6 +23,7 @@ import { Types } from '../../../common/mongo';
import json5 from 'json5';
import { MongoDatasetCollectionTags } from '../tag/schema';
import { readFromSecondary } from '../../../common/mongo/utils';
import { MongoDatasetDataText } from '../data/dataTextSchema';
type SearchDatasetDataProps = {
teamId: string;
@@ -266,57 +267,60 @@ export async function searchDatasetData(props: SearchDatasetDataProps) {
filterCollectionIdList
});
// get q and a
const dataList = await MongoDatasetData.find(
{
teamId,
datasetId: { $in: datasetIds },
collectionId: { $in: Array.from(new Set(results.map((item) => item.collectionId))) },
'indexes.dataId': { $in: results.map((item) => item.id?.trim()) }
},
'datasetId collectionId updateTime q a chunkIndex indexes'
)
.populate<{ collection: DatasetCollectionSchemaType }>(
'collection',
'name fileId rawLink externalFileId externalFileUrl'
)
.lean();
// Get data and collections
const collectionIdList = Array.from(new Set(results.map((item) => item.collectionId)));
const [dataList, collections] = await Promise.all([
MongoDatasetData.find(
{
teamId,
datasetId: { $in: datasetIds },
collectionId: { $in: collectionIdList },
'indexes.dataId': { $in: results.map((item) => item.id?.trim()) }
},
'_id datasetId collectionId updateTime q a chunkIndex indexes',
{ ...readFromSecondary }
).lean(),
MongoDatasetCollection.find(
{
_id: { $in: collectionIdList }
},
'_id name fileId rawLink externalFileId externalFileUrl',
{ ...readFromSecondary }
).lean()
]);
// add score to data(It's already sorted. The first one is the one with the most points)
const concatResults = dataList.map((data) => {
const dataIdList = data.indexes.map((item) => item.dataId);
const formatResult = results
.map((item, index) => {
const collection = collections.find((col) => String(col._id) === String(item.collectionId));
if (!collection) {
console.log('Collection is not found', item);
return;
}
const data = dataList.find((data) =>
data.indexes.some((index) => index.dataId === item.id)
);
if (!data) {
console.log('Data is not found', item);
return;
}
const maxScoreResult = results.find((item) => {
return dataIdList.includes(item.id);
});
const score = item?.score || 0;
return {
...data,
score: maxScoreResult?.score || 0
};
});
const result: SearchDataResponseItemType = {
id: String(data._id),
updateTime: data.updateTime,
q: data.q,
a: data.a,
chunkIndex: data.chunkIndex,
datasetId: String(data.datasetId),
collectionId: String(data.collectionId),
...getCollectionSourceData(collection),
score: [{ type: SearchScoreTypeEnum.embedding, value: score, index }]
};
concatResults.sort((a, b) => b.score - a.score);
const formatResult = concatResults.map((data, index) => {
if (!data.collectionId) {
console.log('Collection is not found', data);
}
const result: SearchDataResponseItemType = {
id: String(data._id),
updateTime: data.updateTime,
q: data.q,
a: data.a,
chunkIndex: data.chunkIndex,
datasetId: String(data.datasetId),
collectionId: String(data.collectionId),
...getCollectionSourceData(data.collection),
score: [{ type: SearchScoreTypeEnum.embedding, value: data.score, index }]
};
return result;
});
return result;
})
.filter(Boolean) as SearchDataResponseItemType[];
return {
embeddingRecallResults: formatResult,
@@ -344,88 +348,224 @@ export async function searchDatasetData(props: SearchDatasetDataProps) {
};
}
let searchResults = (
const searchResults = (
await Promise.all(
datasetIds.map(async (id) => {
return MongoDatasetData.aggregate([
{
$match: {
teamId: new Types.ObjectId(teamId),
datasetId: new Types.ObjectId(id),
$text: { $search: jiebaSplit({ text: query }) },
...(filterCollectionIdList
? {
collectionId: {
$in: filterCollectionIdList.map((id) => new Types.ObjectId(id))
return MongoDatasetData.aggregate(
[
{
$match: {
teamId: new Types.ObjectId(teamId),
datasetId: new Types.ObjectId(id),
$text: { $search: jiebaSplit({ text: query }) },
...(filterCollectionIdList
? {
collectionId: {
$in: filterCollectionIdList.map((id) => new Types.ObjectId(id))
}
}
}
: {}),
...(forbidCollectionIdList && forbidCollectionIdList.length > 0
? {
collectionId: {
$nin: forbidCollectionIdList.map((id) => new Types.ObjectId(id))
: {}),
...(forbidCollectionIdList && forbidCollectionIdList.length > 0
? {
collectionId: {
$nin: forbidCollectionIdList.map((id) => new Types.ObjectId(id))
}
}
}
: {})
: {})
}
},
{
$sort: {
score: { $meta: 'textScore' }
}
},
{
$limit: limit
},
{
$project: {
_id: 1,
datasetId: 1,
collectionId: 1,
updateTime: 1,
q: 1,
a: 1,
chunkIndex: 1,
score: { $meta: 'textScore' }
}
}
},
],
{
$addFields: {
score: { $meta: 'textScore' }
}
},
{
$sort: {
score: { $meta: 'textScore' }
}
},
{
$limit: limit
},
{
$project: {
_id: 1,
datasetId: 1,
collectionId: 1,
updateTime: 1,
q: 1,
a: 1,
chunkIndex: 1,
score: 1
}
...readFromSecondary
}
]);
);
})
)
).flat() as (DatasetDataSchemaType & { score: number })[];
// resort
searchResults.sort((a, b) => b.score - a.score);
searchResults.slice(0, limit);
// Get data and collections
const collections = await MongoDatasetCollection.find(
{
_id: { $in: searchResults.map((item) => item.collectionId) }
},
'_id name fileId rawLink'
);
'_id name fileId rawLink externalFileId externalFileUrl',
{ ...readFromSecondary }
).lean();
return {
fullTextRecallResults: searchResults.map((item, index) => {
const collection = collections.find((col) => String(col._id) === String(item.collectionId));
return {
id: String(item._id),
datasetId: String(item.datasetId),
collectionId: String(item.collectionId),
updateTime: item.updateTime,
...getCollectionSourceData(collection),
q: item.q,
a: item.a,
chunkIndex: item.chunkIndex,
indexes: item.indexes,
score: [{ type: SearchScoreTypeEnum.fullText, value: item.score, index }]
};
}),
fullTextRecallResults: searchResults
.map((data, index) => {
const collection = collections.find(
(col) => String(col._id) === String(data.collectionId)
);
if (!collection) {
console.log('Collection is not found', data);
return;
}
return {
id: String(data._id),
datasetId: String(data.datasetId),
collectionId: String(data.collectionId),
updateTime: data.updateTime,
q: data.q,
a: data.a,
chunkIndex: data.chunkIndex,
indexes: data.indexes,
...getCollectionSourceData(collection),
score: [{ type: SearchScoreTypeEnum.fullText, value: data.score ?? 0, index }]
};
})
.filter(Boolean) as SearchDataResponseItemType[],
tokenLen: 0
};
};
const fullTextRecall2 = async ({
query,
limit,
filterCollectionIdList,
forbidCollectionIdList
}: {
query: string;
limit: number;
filterCollectionIdList?: string[];
forbidCollectionIdList: string[];
}): Promise<{
fullTextRecallResults: SearchDataResponseItemType[];
tokenLen: number;
}> => {
if (limit === 0) {
return {
fullTextRecallResults: [],
tokenLen: 0
};
}
const searchResults = (
await Promise.all(
datasetIds.map(async (id) => {
return MongoDatasetDataText.aggregate(
[
{
$match: {
teamId: new Types.ObjectId(teamId),
datasetId: new Types.ObjectId(id),
$text: { $search: jiebaSplit({ text: query }) },
...(filterCollectionIdList
? {
collectionId: {
$in: filterCollectionIdList.map((id) => new Types.ObjectId(id))
}
}
: {}),
...(forbidCollectionIdList && forbidCollectionIdList.length > 0
? {
collectionId: {
$nin: forbidCollectionIdList.map((id) => new Types.ObjectId(id))
}
}
: {})
}
},
{
$sort: {
score: { $meta: 'textScore' }
}
},
{
$limit: limit
},
{
$project: {
_id: 1,
collectionId: 1,
dataId: 1,
score: { $meta: 'textScore' }
}
}
],
{
...readFromSecondary
}
);
})
)
).flat() as (DatasetDataTextSchemaType & { score: number })[];
// Get data and collections
const [dataList, collections] = await Promise.all([
MongoDatasetData.find(
{
_id: { $in: searchResults.map((item) => item.dataId) }
},
'_id datasetId collectionId updateTime q a chunkIndex indexes',
{ ...readFromSecondary }
).lean(),
MongoDatasetCollection.find(
{
_id: { $in: searchResults.map((item) => item.collectionId) }
},
'_id name fileId rawLink externalFileId externalFileUrl',
{ ...readFromSecondary }
).lean()
]);
return {
fullTextRecallResults: searchResults
.map((item, index) => {
const collection = collections.find(
(col) => String(col._id) === String(item.collectionId)
);
if (!collection) {
console.log('Collection is not found', item);
return;
}
const data = dataList.find((data) => String(data._id) === String(item.dataId));
if (!data) {
console.log('Data is not found', item);
return;
}
return {
id: String(data._id),
datasetId: String(data.datasetId),
collectionId: String(data.collectionId),
updateTime: data.updateTime,
q: data.q,
a: data.a,
chunkIndex: data.chunkIndex,
indexes: data.indexes,
...getCollectionSourceData(collection),
score: [
{
type: SearchScoreTypeEnum.fullText,
value: item.score || 0,
index
}
]
};
})
.filter(Boolean) as SearchDataResponseItemType[],
tokenLen: 0
};
};
@@ -496,7 +636,8 @@ export async function searchDatasetData(props: SearchDatasetDataProps) {
forbidCollectionIdList,
filterCollectionIdList
}),
fullTextRecall({
// FullText tmp
fullTextRecall2({
query,
limit: fullTextLimit,
filterCollectionIdList,

View File

@@ -92,6 +92,7 @@ OutLinkSchema.virtual('associatedApp', {
try {
OutLinkSchema.index({ shareId: -1 });
OutLinkSchema.index({ teamId: 1, tmbId: 1, appId: 1 });
} catch (error) {
console.log(error);
}

View File

@@ -0,0 +1,43 @@
import { TeamPermission } from '@fastgpt/global/support/permission/user/controller';
import { AuthModeType, AuthResponseType } from '../type';
import { TeamErrEnum } from '@fastgpt/global/common/error/code/team';
import { authUserPer } from '../user/auth';
import { ManagePermissionVal } from '@fastgpt/global/support/permission/constant';
/*
Team manager can control org
*/
export const authOrgMember = async ({
orgIds,
...props
}: {
orgIds: string | string[];
} & AuthModeType): Promise<AuthResponseType> => {
const result = await authUserPer({
...props,
per: ManagePermissionVal
});
const { teamId, tmbId, isRoot, tmb } = result;
if (isRoot) {
return {
teamId,
tmbId,
userId: result.userId,
appId: result.appId,
apikey: result.apikey,
isRoot,
authType: result.authType,
permission: new TeamPermission({ isOwner: true })
};
}
if (tmb.permission.hasManagePer) {
return {
...result,
permission: tmb.permission
};
}
return Promise.reject(TeamErrEnum.unAuthTeam);
};

View File

@@ -1,8 +1,8 @@
import { MongoTeamMember } from '../../user/team/teamMemberSchema';
import { checkTeamAIPoints } from '../teamLimit';
import { UserErrEnum } from '@fastgpt/global/common/error/code/user';
import { UserModelSchema } from '@fastgpt/global/support/user/type';
import { TeamSchema } from '@fastgpt/global/support/user/team/type';
import { TeamErrEnum } from '@fastgpt/global/common/error/code/team';
export async function getUserChatInfoAndAuthTeamPoints(tmbId: string) {
const tmb = await MongoTeamMember.findById(tmbId, 'userId teamId')
@@ -18,7 +18,7 @@ export async function getUserChatInfoAndAuthTeamPoints(tmbId: string) {
])
.lean();
if (!tmb) return Promise.reject(UserErrEnum.unAuthUser);
if (!tmb) return Promise.reject(TeamErrEnum.notUser);
await checkTeamAIPoints(tmb.team._id);

View File

@@ -8,10 +8,7 @@ import { authOpenApiKey } from '../openapi/auth';
import { FileTokenQuery } from '@fastgpt/global/common/file/type';
import { MongoResourcePermission } from './schema';
import { ClientSession } from 'mongoose';
import {
PermissionValueType,
ResourcePermissionType
} from '@fastgpt/global/support/permission/type';
import { PermissionValueType } from '@fastgpt/global/support/permission/type';
import { bucketNameMap } from '@fastgpt/global/common/file/constants';
import { addMinutes } from 'date-fns';
import { getGroupsByTmbId } from './memberGroup/controllers';
@@ -21,6 +18,8 @@ import { CommonErrEnum } from '@fastgpt/global/common/error/code/common';
import { MemberGroupSchemaType } from '@fastgpt/global/support/permission/memberGroup/type';
import { TeamMemberSchema } from '@fastgpt/global/support/user/team/type';
import { UserModelSchema } from '@fastgpt/global/support/user/type';
import { OrgSchemaType } from '@fastgpt/global/support/user/team/org/type';
import { getOrgIdSetWithParentByTmbId } from './org/controllers';
/** get resource permission for a team member
* If there is no permission for the team member, it will return undefined
@@ -67,67 +66,44 @@ export const getResourcePermission = async ({
}
// If there is no personal permission, get the group permission
const groupIdList = (await getGroupsByTmbId({ tmbId, teamId })).map((item) => item._id);
const [groupPers, orgPers] = await Promise.all([
getGroupsByTmbId({ tmbId, teamId })
.then((res) => res.map((item) => item._id))
.then((groupIdList) =>
MongoResourcePermission.find(
{
teamId,
resourceType,
groupId: {
$in: groupIdList
},
resourceId
},
'permission'
).lean()
)
.then((perList) => perList.map((item) => item.permission)),
getOrgIdSetWithParentByTmbId({ tmbId, teamId })
.then((item) => Array.from(item))
.then((orgIds) =>
MongoResourcePermission.find(
{
teamId,
resourceType,
orgId: {
$in: Array.from(orgIds)
},
resourceId
},
'permission'
).lean()
)
.then((perList) => perList.map((item) => item.permission))
]);
if (groupIdList.length === 0) {
return undefined;
}
// get the maximum permission of the group
const pers = (
await MongoResourcePermission.find(
{
teamId,
resourceType,
groupId: {
$in: groupIdList
},
resourceId
},
'permission'
).lean()
).map((item) => item.permission);
const groupPer = getGroupPer(pers);
return groupPer;
return concatPer([...groupPers, ...orgPers]);
};
/* 仅取 members 不取 groups */
export async function getResourceAllClbs({
resourceId,
teamId,
resourceType,
session
}: {
teamId: string;
session?: ClientSession;
} & (
| {
resourceType: 'team';
resourceId?: undefined;
}
| {
resourceType: Omit<PerResourceTypeEnum, 'team'>;
resourceId?: string | null;
}
)): Promise<ResourcePermissionType[]> {
return MongoResourcePermission.find(
{
resourceType: resourceType,
teamId: teamId,
resourceId,
groupId: {
$exists: false
}
},
null,
{
session
}
).lean();
}
export async function getResourceClbsAndGroups({
resourceId,
resourceType,
@@ -155,10 +131,17 @@ export const getClbsAndGroupsWithInfo = async ({
resourceType,
teamId
}: {
resourceId: ParentIdType;
resourceType: Omit<`${PerResourceTypeEnum}`, 'team'>;
teamId: string;
}) =>
} & (
| {
resourceId: ParentIdType;
resourceType: Omit<`${PerResourceTypeEnum}`, 'team'>;
}
| {
resourceType: 'team';
resourceId?: undefined;
}
)) =>
Promise.all([
MongoResourcePermission.find({
teamId,
@@ -170,7 +153,7 @@ export const getClbsAndGroupsWithInfo = async ({
})
.populate<{ tmb: TeamMemberSchema & { user: UserModelSchema } }>({
path: 'tmb',
select: 'name userId',
select: 'name userId role',
populate: {
path: 'user',
select: 'avatar'
@@ -186,6 +169,16 @@ export const getClbsAndGroupsWithInfo = async ({
}
})
.populate<{ group: MemberGroupSchemaType }>('group', 'name avatar')
.lean(),
MongoResourcePermission.find({
teamId,
resourceId,
resourceType,
orgId: {
$exists: true
}
})
.populate<{ org: OrgSchemaType }>({ path: 'org', select: 'name avatar' })
.lean()
]);
@@ -196,6 +189,7 @@ export const delResourcePermission = ({
session,
tmbId,
groupId,
orgId,
...props
}: {
resourceType: PerResourceTypeEnum;
@@ -204,15 +198,18 @@ export const delResourcePermission = ({
session?: ClientSession;
tmbId?: string;
groupId?: string;
orgId?: string;
}) => {
// tmbId or groupId only one and not both
if (!!tmbId === !!groupId) {
// either tmbId or groupId or orgId must be provided
if (!tmbId && !groupId && !orgId) {
return Promise.reject(CommonErrEnum.missingParams);
}
return MongoResourcePermission.deleteOne(
{
...(tmbId ? { tmbId } : {}),
...(groupId ? { groupId } : {}),
...(orgId ? { orgId } : {}),
...props
},
{ session }
@@ -250,7 +247,7 @@ export function authJWT(token: string) {
}>((resolve, reject) => {
const key = process.env.TOKEN_KEY as string;
jwt.verify(token, key, function (err, decoded: any) {
jwt.verify(token, key, (err, decoded: any) => {
if (err || !decoded?.userId) {
reject(ERROR_ENUM.unAuthorization);
return;
@@ -436,7 +433,7 @@ export const authFileToken = (token?: string) =>
}
const key = (process.env.FILE_TOKEN_KEY as string) ?? 'filetoken';
jwt.verify(token, key, function (err, decoded: any) {
jwt.verify(token, key, (err, decoded: any) => {
if (err || !decoded.bucketName || !decoded?.teamId || !decoded?.fileId) {
reject(ERROR_ENUM.unAuthFile);
return;
@@ -450,10 +447,10 @@ export const authFileToken = (token?: string) =>
});
});
export const getGroupPer = (groups: PermissionValueType[] = []) => {
if (groups.length === 0) {
export const concatPer = (perList: PermissionValueType[] = []) => {
if (perList.length === 0) {
return undefined;
}
return new Permission().addPer(...groups).value;
return new Permission().addPer(...perList).value;
};

View File

@@ -1,11 +1,11 @@
import { mongoSessionRun } from '../../common/mongo/sessionRun';
import { MongoResourcePermission } from './schema';
import { ClientSession, Model } from 'mongoose';
import { PerResourceTypeEnum } from '@fastgpt/global/support/permission/constant';
import { PermissionValueType } from '@fastgpt/global/support/permission/type';
import type { ClientSession, Model } from 'mongoose';
import type { PerResourceTypeEnum } from '@fastgpt/global/support/permission/constant';
import type { PermissionValueType } from '@fastgpt/global/support/permission/type';
import { getResourceClbsAndGroups } from './controller';
import { RequireOnlyOne } from '@fastgpt/global/common/type/utils';
import { ParentIdType } from '@fastgpt/global/common/parentFolder/type';
import type { RequireOnlyOne } from '@fastgpt/global/common/type/utils';
import type { ParentIdType } from '@fastgpt/global/common/parentFolder/type';
export type SyncChildrenPermissionResourceType = {
_id: string;
@@ -18,6 +18,7 @@ export type UpdateCollaboratorItem = {
} & RequireOnlyOne<{
tmbId: string;
groupId: string;
orgId: string;
}>;
// sync the permission to all children folders.
@@ -161,7 +162,7 @@ export async function resumeInheritPermission({
}
}
/*
/*
Delete all the collaborators and then insert the new collaborators.
*/
export async function syncCollaborators({

View File

@@ -1,9 +1,6 @@
import { MemberGroupSchemaType } from '@fastgpt/global/support/permission/memberGroup/type';
import { MongoGroupMemberModel } from './groupMemberSchema';
import { TeamMemberSchema } from '@fastgpt/global/support/user/team/type';
import { PerResourceTypeEnum } from '@fastgpt/global/support/permission/constant';
import { MongoResourcePermission } from '../schema';
import { getGroupPer, parseHeaderCert } from '../controller';
import { parseHeaderCert } from '../controller';
import { MongoMemberGroupModel } from './memberGroupSchema';
import { DefaultGroupName } from '@fastgpt/global/support/user/team/group/constant';
import { ClientSession } from 'mongoose';
@@ -50,26 +47,32 @@ export const getTeamDefaultGroup = async ({
export const getGroupsByTmbId = async ({
tmbId,
teamId,
role
role,
session
}: {
tmbId: string;
teamId: string;
role?: `${GroupMemberRole}`[];
session?: ClientSession;
}) =>
(
await Promise.all([
(
await MongoGroupMemberModel.find({
tmbId,
groupId: {
$exists: true
await MongoGroupMemberModel.find(
{
tmbId,
groupId: {
$exists: true
},
...(role ? { role: { $in: role } } : {})
},
...(role ? { role: { $in: role } } : {})
})
undefined,
{ session }
)
.populate<{ group: MemberGroupSchemaType }>('group')
.lean()
).map((item) => item.group),
role ? [] : getTeamDefaultGroup({ teamId })
role ? [] : getTeamDefaultGroup({ teamId, session })
])
).flat();
@@ -79,46 +82,6 @@ export const getGroupMembersByGroupId = async (groupId: string) => {
}).lean();
};
/**
* Get tmb's group permission: the maximum permission of the group
* @param tmbId
* @param resourceId
* @param resourceType
* @returns the maximum permission of the group
*/
export const getGroupPermission = async ({
tmbId,
resourceId,
teamId,
resourceType
}: {
tmbId: string;
teamId: string;
} & (
| {
resourceId?: undefined;
resourceType: 'team';
}
| {
resourceId: string;
resourceType: Omit<PerResourceTypeEnum, 'team'>;
}
)) => {
const groupIds = (await getGroupsByTmbId({ tmbId, teamId })).map((item) => item._id);
const groupPermissions = (
await MongoResourcePermission.find({
groupId: {
$in: groupIds
},
resourceType,
resourceId,
teamId
})
).map((item) => item.permission);
return getGroupPer(groupPermissions);
};
// auth group member role
export const authGroupMemberRole = async ({
groupId,
@@ -140,8 +103,12 @@ export const authGroupMemberRole = async ({
tmbId
};
}
const groupMember = await MongoGroupMemberModel.findOne({ groupId, tmbId });
const tmb = await getTmbInfoByTmbId({ tmbId });
const [groupMember, tmb] = await Promise.all([
MongoGroupMemberModel.findOne({ groupId, tmbId }),
getTmbInfoByTmbId({ tmbId })
]);
// Team admin or role check
if (tmb.permission.hasManagePer || (groupMember && role.includes(groupMember.role))) {
return {
...result,

View File

@@ -0,0 +1,95 @@
import { TeamErrEnum } from '@fastgpt/global/common/error/code/team';
import type { OrgSchemaType } from '@fastgpt/global/support/user/team/org/type';
import type { ClientSession } from 'mongoose';
import { MongoOrgModel } from './orgSchema';
import { MongoOrgMemberModel } from './orgMemberSchema';
import { getOrgChildrenPath } from '@fastgpt/global/support/user/team/org/constant';
/** List the org-membership records (orgId only) of one team member. */
export const getOrgsByTmbId = async ({ teamId, tmbId }: { teamId: string; tmbId: string }) => {
  return MongoOrgMemberModel.find({ teamId, tmbId }, 'orgId').lean();
};
/**
 * Collect the ids of every org a team member belongs to, plus all ancestor
 * orgs on each membership's materialized path.
 *
 * Org docs store their ancestor chain in `path` as "/pathId1/pathId2/..."
 * (see the org schema); ancestors are resolved back to `_id`s via `pathId`.
 *
 * @returns a Set of org id strings (direct memberships + their ancestors)
 */
export const getOrgIdSetWithParentByTmbId = async ({
  teamId,
  tmbId
}: {
  teamId: string;
  tmbId: string;
}) => {
  // Direct memberships, deduplicated.
  const orgMembers = await MongoOrgMemberModel.find({ teamId, tmbId }, 'orgId').lean();
  const orgIds = Array.from(new Set(orgMembers.map((item) => String(item.orgId))));

  // Gather the pathIds of every ancestor referenced by those orgs.
  // (team-scoped for consistency with the parent lookup below)
  const orgs = await MongoOrgModel.find({ _id: { $in: orgIds }, teamId }, 'path').lean();
  const parentPathIds = new Set<string>(
    orgs.flatMap((org) => org.path.split('/').filter(Boolean))
  );

  // Resolve ancestor pathIds back to org _ids.
  const parentOrgs = await MongoOrgModel.find(
    {
      teamId,
      pathId: { $in: Array.from(parentPathIds) }
    },
    '_id'
  ).lean();
  const parentOrgIds = parentOrgs.map((item) => String(item._id));

  return new Set([...orgIds, ...parentOrgIds]);
};
/**
 * Find every descendant of `org` (children, grandchildren, ...) by matching
 * on the materialized `path` prefix.
 * NOTE(review): getOrgChildrenPath's output is interpolated into $regex
 * unescaped — assumes pathIds contain no regex metacharacters (they are
 * nanoid-generated per the org schema); confirm the nanoid alphabet.
 */
export const getChildrenByOrg = async ({
  org,
  teamId,
  session
}: {
  org: OrgSchemaType;
  teamId: string;
  session?: ClientSession;
}) => {
  return MongoOrgModel.find(
    { teamId, path: { $regex: `^${getOrgChildrenPath(org)}` } },
    undefined,
    {
      session
    }
  ).lean();
};
/**
 * Load an org by id (scoped to its team) together with all of its
 * descendants. Rejects with TeamErrEnum.orgNotExist when the org is missing.
 */
export const getOrgAndChildren = async ({
  orgId,
  teamId,
  session
}: {
  orgId: string;
  teamId: string;
  session?: ClientSession;
}) => {
  const org = await MongoOrgModel.findOne({ _id: orgId, teamId }, undefined, { session }).lean();
  if (!org) return Promise.reject(TeamErrEnum.orgNotExist);

  return {
    org,
    children: await getChildrenByOrg({ org, teamId, session })
  };
};
/**
 * Create the team's ROOT org (created alongside the team itself).
 * `path` is the empty string: the org schema explicitly allows '' (but not
 * null) so the root can sit at the top of the materialized-path tree.
 */
export async function createRootOrg({
  teamId,
  session
}: {
  teamId: string;
  session?: ClientSession;
}) {
  // Array form of create() is required by mongoose when passing a session.
  return MongoOrgModel.create(
    [
      {
        teamId,
        name: 'ROOT',
        path: ''
      }
    ],
    { session }
  );
}

View File

@@ -0,0 +1,65 @@
import { OrgCollectionName } from '@fastgpt/global/support/user/team/org/constant';
import { connectionMongo, getMongoModel } from '../../../common/mongo';
import {
TeamCollectionName,
TeamMemberCollectionName
} from '@fastgpt/global/support/user/team/constant';
import { OrgMemberSchemaType } from '@fastgpt/global/support/user/team/org/type';
const { Schema } = connectionMongo;
export const OrgMemberCollectionName = 'team_org_members';

// Join collection: which team members (tmbId) belong to which org (orgId).
export const OrgMemberSchema = new Schema({
  teamId: {
    type: Schema.Types.ObjectId,
    ref: TeamCollectionName,
    required: true
  },
  orgId: {
    type: Schema.Types.ObjectId,
    ref: OrgCollectionName,
    required: true
  },
  tmbId: {
    type: Schema.Types.ObjectId,
    ref: TeamMemberCollectionName,
    required: true
  }
  // role: {
  //   type: String,
  //   enum: Object.values(OrgMemberRole),
  //   required: true,
  //   default: OrgMemberRole.member
  // }
});

// Populate helper: orgMember.org resolves to the referenced org document.
OrgMemberSchema.virtual('org', {
  ref: OrgCollectionName,
  localField: 'orgId',
  foreignField: '_id',
  justOne: true
});

try {
  // A member may appear in a given org at most once.
  OrgMemberSchema.index(
    {
      teamId: 1,
      orgId: 1,
      tmbId: 1
    },
    {
      unique: true
    }
  );
  // Look up all org memberships of one member within a team.
  OrgMemberSchema.index({
    teamId: 1,
    tmbId: 1
  });
} catch (error) {
  console.log(error);
}

export const MongoOrgMemberModel = getMongoModel<OrgMemberSchemaType>(
  OrgMemberCollectionName,
  OrgMemberSchema
);

View File

@@ -0,0 +1,77 @@
import { TeamCollectionName } from '@fastgpt/global/support/user/team/constant';
import { OrgCollectionName } from '@fastgpt/global/support/user/team/org/constant';
import type { OrgSchemaType } from '@fastgpt/global/support/user/team/org/type';
import { connectionMongo, getMongoModel } from '../../../common/mongo';
import { OrgMemberCollectionName } from './orgMemberSchema';
import { getNanoid } from '@fastgpt/global/common/string/tools';
const { Schema } = connectionMongo;
// Org tree node. The hierarchy is stored as a materialized path of pathIds;
// the root org has path === '' (see createRootOrg).
export const OrgSchema = new Schema(
  {
    teamId: {
      type: Schema.Types.ObjectId,
      ref: TeamCollectionName,
      required: true
    },
    pathId: {
      // path id, only used for path
      type: String,
      required: true,
      default: () => getNanoid()
    },
    path: {
      type: String,
      required: function (this: OrgSchemaType) {
        return typeof this.path !== 'string';
      } // allow empty string, but not null
    },
    name: {
      type: String,
      required: true
    },
    avatar: String,
    description: String,
    updateTime: {
      type: Date,
      default: () => new Date()
    }
  },
  {
    // Auto update updateTime
    timestamps: {
      updatedAt: 'updateTime'
    }
  }
);

// Populate helper: org.members resolves to this org's membership records.
OrgSchema.virtual('members', {
  ref: OrgMemberCollectionName,
  localField: '_id',
  foreignField: 'orgId'
});

// OrgSchema.virtual('permission', {
//   ref: ResourcePermissionCollectionName,
//   localField: '_id',
//   foreignField: 'orgId',
//   justOne: true
// });

try {
  // Path-prefix queries for descendant lookups (getChildrenByOrg).
  OrgSchema.index({
    teamId: 1,
    path: 1
  });
  // pathId is unique per team: paths are built from pathIds.
  OrgSchema.index(
    {
      teamId: 1,
      pathId: 1
    },
    {
      unique: true
    }
  );
} catch (error) {
  console.log(error);
}

export const MongoOrgModel = getMongoModel<OrgSchemaType>(OrgCollectionName, OrgSchema);

View File

@@ -6,6 +6,7 @@ import { connectionMongo, getMongoModel } from '../../common/mongo';
import type { ResourcePermissionType } from '@fastgpt/global/support/permission/type';
import { PerResourceTypeEnum } from '@fastgpt/global/support/permission/constant';
import { MemberGroupCollectionName } from './memberGroup/memberGroupSchema';
import { OrgCollectionName } from '@fastgpt/global/support/user/team/org/constant';
const { Schema } = connectionMongo;
export const ResourcePermissionCollectionName = 'resource_permissions';
@@ -23,6 +24,10 @@ export const ResourcePermissionSchema = new Schema({
type: Schema.Types.ObjectId,
ref: MemberGroupCollectionName
},
orgId: {
type: Schema.Types.ObjectId,
ref: OrgCollectionName
},
resourceType: {
type: String,
enum: Object.values(PerResourceTypeEnum),
@@ -51,6 +56,12 @@ ResourcePermissionSchema.virtual('group', {
foreignField: '_id',
justOne: true
});
ResourcePermissionSchema.virtual('org', {
ref: OrgCollectionName,
localField: 'orgId',
foreignField: '_id',
justOne: true
});
try {
ResourcePermissionSchema.index(
@@ -70,6 +81,23 @@ try {
}
);
ResourcePermissionSchema.index(
{
resourceType: 1,
teamId: 1,
resourceId: 1,
orgId: 1
},
{
unique: true,
partialFilterExpression: {
orgId: {
$exists: true
}
}
}
);
ResourcePermissionSchema.index(
{
resourceType: 1,
@@ -87,6 +115,7 @@ try {
}
);
// Delete tmb permission
ResourcePermissionSchema.index({
resourceType: 1,
teamId: 1,

View File

@@ -19,9 +19,7 @@ export const checkDatasetLimit = async ({
if (!standardConstants) return;
if (usedDatasetSize + insertLen >= datasetMaxSize) {
return Promise.reject(
`您的知识库容量为: ${datasetMaxSize}组,已使用: ${usedDatasetSize}组,导入当前文件需要: ${insertLen}组,请增加知识库容量后导入。`
);
return Promise.reject(TeamErrEnum.datasetSizeNotEnough);
}
if (usedPoints >= totalPoints) {

View File

@@ -3,22 +3,10 @@ const { Schema } = connectionMongo;
import { hashStr } from '@fastgpt/global/common/string/tools';
import type { UserModelSchema } from '@fastgpt/global/support/user/type';
import { UserStatusEnum, userStatusMap } from '@fastgpt/global/support/user/constant';
import { getRandomUserAvatar } from '@fastgpt/global/support/user/utils';
export const userCollectionName = 'users';
const defaultAvatars = [
'/imgs/avatar/RoyalBlueAvatar.svg',
'/imgs/avatar/PurpleAvatar.svg',
'/imgs/avatar/AdoraAvatar.svg',
'/imgs/avatar/OrangeAvatar.svg',
'/imgs/avatar/RedAvatar.svg',
'/imgs/avatar/GrayModernAvatar.svg',
'/imgs/avatar/TealAvatar.svg',
'/imgs/avatar/GreenAvatar.svg',
'/imgs/avatar/BrightBlueAvatar.svg',
'/imgs/avatar/BlueAvatar.svg'
];
const UserSchema = new Schema({
status: {
type: String,
@@ -47,7 +35,7 @@ const UserSchema = new Schema({
},
avatar: {
type: String,
default: defaultAvatars[Math.floor(Math.random() * defaultAvatars.length)]
default: () => getRandomUserAvatar()
},
promotionRate: {
@@ -78,11 +66,8 @@ const UserSchema = new Schema({
});
try {
// login
UserSchema.index({ username: 1, password: 1 }, { background: true });
// Admin charts
UserSchema.index({ createTime: -1 }, { background: true });
UserSchema.index({ createTime: -1 });
} catch (error) {
console.log(error);
}

View File

@@ -16,6 +16,8 @@ import { MongoMemberGroupModel } from '../../permission/memberGroup/memberGroupS
import { mongoSessionRun } from '../../../common/mongo/sessionRun';
import { DefaultGroupName } from '@fastgpt/global/support/user/team/group/constant';
import { getAIApi, openaiBaseUrl } from '../../../core/ai/config';
import { createRootOrg } from '../../permission/org/controllers';
import { refreshSourceAvatar } from '../../../common/file/image/controller';
async function getTeamMember(match: Record<string, any>): Promise<TeamTmbItemType> {
const tmb = await MongoTeamMember.findOne(match).populate<{ team: TeamSchema }>('team').lean();
@@ -32,6 +34,7 @@ async function getTeamMember(match: Record<string, any>): Promise<TeamTmbItemTyp
return {
userId: String(tmb.userId),
teamId: String(tmb.teamId),
teamAvatar: tmb.team.avatar,
teamName: tmb.team.name,
memberName: tmb.name,
avatar: tmb.team.avatar,
@@ -77,13 +80,11 @@ export async function createDefaultTeam({
userId,
teamName = 'My Team',
avatar = '/icon/logo.svg',
balance,
session
}: {
userId: string;
teamName?: string;
avatar?: string;
balance?: number;
session: ClientSession;
}) {
// auth default team
@@ -100,7 +101,6 @@ export async function createDefaultTeam({
ownerId: userId,
name: teamName,
avatar,
balance,
createTime: new Date()
}
],
@@ -132,15 +132,11 @@ export async function createDefaultTeam({
],
{ session }
);
console.log('create default team and group', userId);
await createRootOrg({ teamId: tmb.teamId, session });
console.log('create default team, group and root org', userId);
return tmb;
} else {
console.log('default team exist', userId);
await MongoTeam.findByIdAndUpdate(tmb.teamId, {
$set: {
...(balance !== undefined && { balance })
}
});
}
}
@@ -215,7 +211,8 @@ export async function updateTeam({
return obj;
})();
await MongoTeam.findByIdAndUpdate(
// This is where we get the old team
const team = await MongoTeam.findByIdAndUpdate(
teamId,
{
$set: {
@@ -241,6 +238,8 @@ export async function updateTeam({
},
{ session }
);
await refreshSourceAvatar(avatar, team?.avatar, session);
}
});
}

View File

@@ -23,10 +23,6 @@ const TeamMemberSchema = new Schema({
type: String,
default: 'Member'
},
role: {
type: String
// enum: Object.keys(TeamMemberRoleMap) // disable enum validation for old data
},
status: {
type: String,
enum: Object.keys(TeamMemberStatusMap)
@@ -38,6 +34,12 @@ const TeamMemberSchema = new Schema({
defaultTeam: {
type: Boolean,
default: false
},
// Abandoned
role: {
type: String
// enum: Object.keys(TeamMemberRoleMap) // disable enum validation for old data
}
});

View File

@@ -21,10 +21,7 @@ const TeamSchema = new Schema({
type: Date,
default: () => Date.now()
},
balance: {
type: Number,
default: 0
},
balance: Number,
teamDomain: {
type: String
},

View File

@@ -61,11 +61,11 @@ const UsageSchema = new Schema({
});
try {
UsageSchema.index({ teamId: 1, tmbId: 1, source: 1, time: -1 }, { background: true });
UsageSchema.index({ teamId: 1, tmbId: 1, source: 1, time: -1 });
// timer task. clear dead team
// UsageSchema.index({ teamId: 1, time: -1 }, { background: true });
// UsageSchema.index({ teamId: 1, time: -1 });
UsageSchema.index({ time: 1 }, { background: true, expireAfterSeconds: 360 * 24 * 60 * 60 });
UsageSchema.index({ time: 1 }, { expireAfterSeconds: 360 * 24 * 60 * 60 });
} catch (error) {
console.log(error);
}

View File

@@ -1,9 +1,27 @@
import TurndownService from 'turndown';
import { ImageType } from '../readFile/type';
import { matchMdImgTextAndUpload } from '@fastgpt/global/common/string/markdown';
import { getNanoid } from '@fastgpt/global/common/string/tools';
// @ts-ignore
const turndownPluginGfm = require('joplin-turndown-plugin-gfm');
/**
 * Strip inline base64 `src` data URIs out of the HTML, replacing each with a
 * short placeholder id, so the (potentially huge) base64 payloads do not sit
 * in the string through the HTML -> markdown conversion. The extracted images
 * are returned alongside the rewritten HTML.
 *
 * Generalized: matches both src="data:..." and src='data:...' (the original
 * regex only handled double quotes); the original quote style is preserved.
 */
const processBase64Images = (htmlContent: string) => {
  // \1 requires the closing quote to match the opening one.
  const base64Regex = /src=(["'])data:([^;"']+);base64,([^"']+)\1/g;
  const images: ImageType[] = [];

  const processedHtml = htmlContent.replace(base64Regex, (_match, quote, mime, base64Data) => {
    const uuid = `IMAGE_${getNanoid(12)}_IMAGE`;
    images.push({
      uuid,
      base64: base64Data,
      mime
    });
    return `src=${quote}${uuid}${quote}`;
  });

  return { processedHtml, images };
};
export const html2md = (
html: string
): {
@@ -25,11 +43,14 @@ export const html2md = (
turndownService.remove(['i', 'script', 'iframe', 'style']);
turndownService.use(turndownPluginGfm.gfm);
const { text, imageList } = matchMdImgTextAndUpload(html);
// Base64 img to id, otherwise it will occupy memory when going to md
const { processedHtml, images } = processBase64Images(html);
const md = turndownService.turndown(processedHtml);
const { text, imageList } = matchMdImgTextAndUpload(md);
return {
rawText: turndownService.turndown(text),
imageList
rawText: text,
imageList: [...images, ...imageList]
};
} catch (error) {
console.log('html 2 markdown error', error);

View File

@@ -24,7 +24,11 @@ export const readFileRawText = ({ buffer, encoding }: ReadRawTextByBuffer): Read
return buffer.toString(encoding as BufferEncoding);
}
return iconv.decode(buffer, encoding);
if (encoding) {
return iconv.decode(buffer, encoding);
}
return buffer.toString('utf-8');
} catch (error) {
return buffer.toString('utf-8');
}

View File

@@ -44,13 +44,15 @@ const parsePowerPoint = async ({
}
// Returning an array of all the xml contents read using fs.readFileSync
const xmlContentArray = files.map((file) => {
try {
return fs.readFileSync(`${decompressPath}/${file.path}`, encoding);
} catch (err) {
return fs.readFileSync(`${decompressPath}/${file.path}`, 'utf-8');
}
});
const xmlContentArray = await Promise.all(
files.map((file) => {
try {
return fs.promises.readFile(`${decompressPath}/${file.path}`, encoding);
} catch (err) {
return fs.promises.readFile(`${decompressPath}/${file.path}`, 'utf-8');
}
})
);
let responseArr: string[] = [];