feat: sync api collection will refresh title; perf: invite link ux (#4237)

* update queue
* feat: sync api collection will refresh title
* sync collection
* remove lock
* perf: invite link ux
@@ -12,10 +12,14 @@ weight: 799

1. Dataset chunking adds preset values for custom separators, and supports splitting on a custom newline character.
2. "External variables" renamed to "custom variables"; they can now be debugged during testing, and are hidden in share links.
3. Collection sync now also updates the collection title.

## ⚙️ Optimizations

1. Exporting chat logs now supports exporting member names.
2. Improved invite-link interactions.
3. When copying fails because there is no SSL certificate, a modal now prompts for manual copying.

## 🐛 Fixes

1. Feishu and Yuque datasets could not be synced.
@@ -124,6 +124,7 @@ curl --location --request GET '{{baseURL}}/v1/file/content?id=xx' \
    "success": true,
    "message": "",
    "data": {
+        "title": "Document title",
        "content": "FastGPT is a knowledge-base Q&A system built on LLMs: it provides out-of-the-box capabilities for data processing, model invocation, and more. Workflows can also be orchestrated visually with Flow to implement complex Q&A scenarios!\n",
        "previewUrl": "xxxx"
    }
@@ -131,10 +132,13 @@ curl --location --request GET '{{baseURL}}/v1/file/content?id=xx' \
```

{{% alert icon=" " context="success" %}}
-Only one of the two is returned; if both are returned, content takes precedence.
+- title - the file title.
- content - the file content, ready to use as-is.
- previewUrl - a file link; the system will request this address to fetch the file content.

+Only one of `content` and `previewUrl` is returned; if both are returned, `content` takes precedence. When `previewUrl` is returned, the system reads the document content by visiting that link.

{{% /alert %}}

{{< /markdownify >}}
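A sketch of how a client might consume this response, assuming the standard fetch API; `resolveFileContent` and the bearer-token header are illustrative, not part of FastGPT's SDK:

```ts
type FileContentData = {
  title?: string;
  content?: string;    // raw text, ready to use
  previewUrl?: string; // fetch this URL to get the text
};

// Hypothetical helper: resolve the raw text according to the documented precedence.
async function resolveFileContent(
  baseUrl: string,
  id: string,
  token: string
): Promise<{ title?: string; rawText: string }> {
  const res = await fetch(`${baseUrl}/v1/file/content?id=${id}`, {
    headers: { Authorization: `Bearer ${token}` }
  });
  const { data } = (await res.json()) as { success: boolean; message: string; data: FileContentData };

  // content takes precedence when both fields are present
  if (data.content) return { title: data.title, rawText: data.content };
  if (data.previewUrl) {
    const fileRes = await fetch(data.previewUrl);
    return { title: data.title, rawText: await fileRes.text() };
  }
  throw new Error('Invalid content type: content or previewUrl is required');
}
```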
@@ -56,7 +56,7 @@ export const replaceSensitiveText = (text: string) => {
};

/* Make sure the first letter is definitely lowercase */
-export const getNanoid = (size = 12) => {
+export const getNanoid = (size = 16) => {
  const firstChar = customAlphabet('abcdefghijklmnopqrstuvwxyz', 1)();

  if (size === 1) return firstChar;
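The hunk above only shows the head of the helper; a plausible completion (the tail after the `size === 1` early return is an assumption, not part of this diff) looks like:

```ts
import { customAlphabet } from 'nanoid';

// Assumed sketch of the full helper: the first character is always a lowercase
// letter, and the remaining characters may include digits.
export const getNanoid = (size = 16) => {
  const firstChar = customAlphabet('abcdefghijklmnopqrstuvwxyz', 1)();
  if (size === 1) return firstChar;

  const rest = customAlphabet('abcdefghijklmnopqrstuvwxyz1234567890', size - 1)();
  return `${firstChar}${rest}`;
};
```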
packages/global/core/dataset/apiDataset.d.ts (8 changed lines, vendored)
@@ -1,3 +1,5 @@
+import { RequireOnlyOne } from '../../common/type/utils';
+
export type APIFileItem = {
  id: string;
  parentId: string | null;
@@ -15,9 +17,9 @@ export type APIFileServer = {

export type APIFileListResponse = APIFileItem[];

-export type APIFileContentResponse = {
-  content?: string;
-  previewUrl?: string;
+export type ApiFileReadContentResponse = {
+  title?: string;
+  rawText: string;
};

export type APIFileReadResponse = {
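`RequireOnlyOne` itself is not shown in this diff; a common definition of that utility type (an assumption about what `common/type/utils` contains) is:

```ts
// Exactly one of the listed keys must be present; the others are forbidden.
export type RequireOnlyOne<T, Keys extends keyof T = keyof T> = Omit<T, Keys> &
  {
    [K in Keys]-?: Required<Pick<T, K>> & Partial<Record<Exclude<Keys, K>, never>>;
  }[Keys];
```

This is what lets the request typing below, `{ title?: string } & RequireOnlyOne<{ content: string; previewUrl: string }>`, express "a title plus exactly one of content or previewUrl".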
@@ -16,3 +16,7 @@ export const getCollectionSourceData = (collection?: DatasetCollectionSchemaType
export const checkCollectionIsFolder = (type: DatasetCollectionTypeEnum) => {
  return type === DatasetCollectionTypeEnum.folder || type === DatasetCollectionTypeEnum.virtual;
};

+export const collectionCanSync = (type: DatasetCollectionTypeEnum) => {
+  return [DatasetCollectionTypeEnum.link, DatasetCollectionTypeEnum.apiFile].includes(type);
+};
@@ -13,38 +13,38 @@ export enum DatasetTypeEnum {
export const DatasetTypeMap = {
  [DatasetTypeEnum.folder]: {
    icon: 'common/folderFill',
-    label: 'folder_dataset',
-    collectionLabel: 'common.Folder'
+    label: i18nT('dataset:folder_dataset'),
+    collectionLabel: i18nT('common:Folder')
  },
  [DatasetTypeEnum.dataset]: {
    icon: 'core/dataset/commonDatasetOutline',
-    label: 'common_dataset',
-    collectionLabel: 'common.File'
+    label: i18nT('dataset:common_dataset'),
+    collectionLabel: i18nT('common:common.File')
  },
  [DatasetTypeEnum.websiteDataset]: {
    icon: 'core/dataset/websiteDatasetOutline',
-    label: 'website_dataset',
-    collectionLabel: 'common.Website'
+    label: i18nT('dataset:website_dataset'),
+    collectionLabel: i18nT('common:common.Website')
  },
  [DatasetTypeEnum.externalFile]: {
    icon: 'core/dataset/externalDatasetOutline',
-    label: 'external_file',
-    collectionLabel: 'common.File'
+    label: i18nT('dataset:external_file'),
+    collectionLabel: i18nT('common:common.File')
  },
  [DatasetTypeEnum.apiDataset]: {
    icon: 'core/dataset/externalDatasetOutline',
-    label: 'api_file',
-    collectionLabel: 'common.File'
+    label: i18nT('dataset:api_file'),
+    collectionLabel: i18nT('common:common.File')
  },
  [DatasetTypeEnum.feishu]: {
    icon: 'core/dataset/feishuDatasetOutline',
-    label: 'feishu_dataset',
-    collectionLabel: 'common.File'
+    label: i18nT('dataset:feishu_dataset'),
+    collectionLabel: i18nT('common:common.File')
  },
  [DatasetTypeEnum.yuque]: {
    icon: 'core/dataset/yuqueDatasetOutline',
-    label: 'yuque_dataset',
-    collectionLabel: 'common.File'
+    label: i18nT('dataset:yuque_dataset'),
+    collectionLabel: i18nT('common:common.File')
  }
};
@@ -1,6 +1,6 @@
import type {
-  APIFileContentResponse,
  APIFileListResponse,
+  ApiFileReadContentResponse,
  APIFileReadResponse,
  APIFileServer
} from '@fastgpt/global/core/dataset/apiDataset';
@@ -8,6 +8,7 @@ import axios, { Method } from 'axios';
import { addLog } from '../../../common/system/log';
import { readFileRawTextByUrl } from '../read';
import { ParentIdType } from '@fastgpt/global/common/parentFolder/type';
+import { RequireOnlyOne } from '@fastgpt/global/common/type/utils';

type ResponseDataType = {
  success: boolean;
@@ -118,17 +119,24 @@ export const useApiDatasetRequest = ({ apiServer }: { apiServer: APIFileServer }
    tmbId: string;
    apiFileId: string;
    customPdfParse?: boolean;
-  }) => {
-    const data = await request<APIFileContentResponse>(
-      `/v1/file/content`,
-      { id: apiFileId },
-      'GET'
-    );
+  }): Promise<ApiFileReadContentResponse> => {
+    const data = await request<
+      {
+        title?: string;
+      } & RequireOnlyOne<{
+        content: string;
+        previewUrl: string;
+      }>
+    >(`/v1/file/content`, { id: apiFileId }, 'GET');
+    const title = data.title;
    const content = data.content;
    const previewUrl = data.previewUrl;

    if (content) {
-      return content;
+      return {
+        title,
+        rawText: content
+      };
    }
    if (previewUrl) {
      const rawText = await readFileRawTextByUrl({
@@ -138,7 +146,10 @@ export const useApiDatasetRequest = ({ apiServer }: { apiServer: APIFileServer }
        relatedId: apiFileId,
        customPdfParse
      });
-      return rawText;
+      return {
+        title,
+        rawText
+      };
    }
    return Promise.reject('Invalid content type: content or previewUrl is required');
  };
@@ -11,7 +11,6 @@ import {
  DatasetCollectionSyncResultEnum,
  DatasetCollectionTypeEnum,
  DatasetSourceReadTypeEnum,
-  DatasetTypeEnum,
  TrainingModeEnum
} from '@fastgpt/global/core/dataset/constants';
import { DatasetErrEnum } from '@fastgpt/global/common/error/code/dataset';
@@ -19,6 +18,7 @@ import { readDatasetSourceRawText } from '../read';
import { hashStr } from '@fastgpt/global/common/string/tools';
import { mongoSessionRun } from '../../../common/mongo/sessionRun';
import { createCollectionAndInsertData, delCollection } from './controller';
+import { collectionCanSync } from '@fastgpt/global/core/dataset/collection/utils';

/**
 * get all collection by top collectionId
@@ -137,10 +137,7 @@ export const collectionTagsToTagLabel = async ({
export const syncCollection = async (collection: CollectionWithDatasetType) => {
  const dataset = collection.dataset;

-  if (
-    collection.type !== DatasetCollectionTypeEnum.link &&
-    dataset.type !== DatasetTypeEnum.apiDataset
-  ) {
+  if (!collectionCanSync(collection.type)) {
    return Promise.reject(DatasetErrEnum.notSupportSync);
  }

@@ -155,15 +152,20 @@ export const syncCollection = async (collection: CollectionWithDatasetType) => {
    };
  }

-  if (!collection.apiFileId) return Promise.reject('apiFileId is missing');
-  if (!dataset.apiServer) return Promise.reject('apiServer not found');
+  const sourceId = collection.apiFileId;
+
+  if (!sourceId) return Promise.reject('apiFileId is missing');

  return {
    type: DatasetSourceReadTypeEnum.apiFile,
-    sourceId: collection.apiFileId,
-    apiServer: dataset.apiServer
+    sourceId,
+    apiServer: dataset.apiServer,
+    feishuServer: dataset.feishuServer,
+    yuqueServer: dataset.yuqueServer
  };
})();

-const rawText = await readDatasetSourceRawText({
+const { title, rawText } = await readDatasetSourceRawText({
  teamId: collection.teamId,
  tmbId: collection.tmbId,
  ...sourceReadType
@@ -196,7 +198,7 @@ export const syncCollection = async (collection: CollectionWithDatasetType) => {
  createCollectionParams: {
    teamId: collection.teamId,
    tmbId: collection.tmbId,
-    name: collection.name,
+    name: title || collection.name,
    datasetId: collection.datasetId,
    parentId: collection.parentId,
    type: collection.type,
@@ -79,9 +79,12 @@ export const readDatasetSourceRawText = async ({
  apiServer?: APIFileServer; // api dataset
  feishuServer?: FeishuServer; // feishu dataset
  yuqueServer?: YuqueServer; // yuque dataset
-}): Promise<string> => {
+}): Promise<{
+  title?: string;
+  rawText: string;
+}> => {
  if (type === DatasetSourceReadTypeEnum.fileLocal) {
-    const { rawText } = await readFileContentFromMongo({
+    const { filename, rawText } = await readFileContentFromMongo({
      teamId,
      tmbId,
      bucketName: BucketNameEnum.dataset,
@@ -89,14 +92,20 @@ export const readDatasetSourceRawText = async ({
      isQAImport,
      customPdfParse
    });
-    return rawText;
+    return {
+      title: filename,
+      rawText
+    };
  } else if (type === DatasetSourceReadTypeEnum.link) {
    const result = await urlsFetch({
      urlList: [sourceId],
      selector
    });

-    return result[0]?.content || '';
+    return {
+      title: result[0]?.title,
+      rawText: result[0]?.content || ''
+    };
  } else if (type === DatasetSourceReadTypeEnum.externalFile) {
    if (!externalFileId) return Promise.reject('FileId not found');
    const rawText = await readFileRawTextByUrl({
@@ -106,9 +115,11 @@ export const readDatasetSourceRawText = async ({
      relatedId: externalFileId,
      customPdfParse
    });
-    return rawText;
+    return {
+      rawText
+    };
  } else if (type === DatasetSourceReadTypeEnum.apiFile) {
-    const rawText = await readApiServerFileContent({
+    const { title, rawText } = await readApiServerFileContent({
      apiServer,
      feishuServer,
      yuqueServer,
@@ -116,9 +127,15 @@ export const readDatasetSourceRawText = async ({
      teamId,
      tmbId
    });
-    return rawText;
+    return {
+      title,
+      rawText
+    };
  }
-  return '';
+  return {
+    title: '',
+    rawText: ''
+  };
};

export const readApiServerFileContent = async ({
@@ -137,7 +154,10 @@ export const readApiServerFileContent = async ({
  teamId: string;
  tmbId: string;
  customPdfParse?: boolean;
-}) => {
+}): Promise<{
+  title?: string;
+  rawText: string;
+}> => {
  if (apiServer) {
    return useApiDatasetRequest({ apiServer }).getFileContent({
      teamId,
@@ -148,7 +168,10 @@ export const readApiServerFileContent = async ({
  }

  if (feishuServer || yuqueServer) {
-    return POST<string>(`/core/dataset/systemApiDataset`, {
+    return POST<{
+      title?: string;
+      rawText: string;
+    }>(`/core/dataset/systemApiDataset`, {
      type: 'content',
      feishuServer,
      yuqueServer,
@@ -1,52 +0,0 @@
-import { TeamCollectionName } from '@fastgpt/global/support/user/team/constant';
-import { connectionMongo, getMongoModel } from '../../../../common/mongo';
-import { InvitationSchemaType } from './type';
-import { randomUUID } from 'crypto';
-const { Schema } = connectionMongo;
-
-export const InvitationCollectionName = 'team_invitation_links';
-
-const InvitationSchema = new Schema({
-  linkId: {
-    type: String,
-    required: true,
-    unique: true,
-    default: () => randomUUID()
-  },
-  teamId: {
-    type: Schema.Types.ObjectId,
-    ref: TeamCollectionName,
-    required: true
-  },
-  usedTimesLimit: {
-    type: Number,
-    default: 1,
-    enum: [1, -1]
-  },
-  forbidden: Boolean,
-  expires: Date,
-  description: String,
-  members: {
-    type: [String],
-    default: []
-  }
-});
-
-InvitationSchema.virtual('team', {
-  ref: TeamCollectionName,
-  localField: 'teamId',
-  foreignField: '_id',
-  justOne: true
-});
-
-try {
-  InvitationSchema.index({ teamId: 1 });
-  InvitationSchema.index({ expires: 1 }, { expireAfterSeconds: 30 * 24 * 60 * 60 });
-} catch (error) {
-  console.log(error);
-}
-
-export const MongoInvitationLink = getMongoModel<InvitationSchemaType>(
-  InvitationCollectionName,
-  InvitationSchema
-);
packages/web/common/zustand/index.ts (8 changed lines, new file)
@@ -0,0 +1,8 @@
+import zustandNpm from 'zustand';
+
+export * from 'zustand';
+export * from 'zustand/middleware';
+export * from 'zustand/middleware/immer';
+
+export * from 'zustand';
+export default zustandNpm;
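A plausible reading of this new barrel module: it pins a single zustand version inside packages/web (the app's own zustand dependency is dropped later in this commit) and gives stores one import path for the library plus its middleware:

```ts
// After this commit, stores import everything zustand-related from the barrel:
import { create, devtools, persist, immer } from '@fastgpt/web/common/zustand';
```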
@@ -3,6 +3,11 @@ import { useToast } from './useToast';
import { useCallback } from 'react';
import { hasHttps } from '../common/system/utils';
import { isProduction } from '@fastgpt/global/common/system/constants';
+import MyModal from '../components/common/MyModal';
+import React from 'react';
+import { Box, ModalBody } from '@chakra-ui/react';
+import Tag from '../components/common/Tag';
+import { useCommonStore } from '../store/useCommonStore';

/**
 * copy text data
@@ -10,43 +15,15 @@ import { isProduction } from '@fastgpt/global/common/system/constants';
export const useCopyData = () => {
  const { t } = useTranslation();
  const { toast } = useToast();
+  const { setCopyContent } = useCommonStore();

  const copyData = useCallback(
-    async (
-      data: string,
-      title: string | null = t('common:common.Copy Successful'),
-      duration = 1000
-    ) => {
+    async (data: string, title = t('common:common.Copy Successful'), duration = 1000) => {
      data = data.trim();

      try {
        if ((hasHttps() || !isProduction) && navigator.clipboard) {
          await navigator.clipboard.writeText(data);
-        } else {
-          throw new Error('');
-        }
-      } catch (error) {
-        // console.log(error);
-
-        const textarea = document.createElement('textarea');
-        textarea.value = data;
-        textarea.style.position = 'absolute';
-        textarea.style.opacity = '0';
-        document.body.appendChild(textarea);
-
-        textarea.select();
-        const res = document.execCommand('copy');
-        document.body.removeChild(textarea);
-
-        if (!res) {
-          return toast({
-            title: t('common:common.Copy_failed'),
-            status: 'error',
-            duration
-          });
-        }
-      }
-
          if (title) {
            toast({
              title,
@@ -54,6 +31,12 @@ export const useCopyData = () => {
              duration
            });
          }
+        } else {
+          throw new Error('');
+        }
+      } catch (error) {
+        setCopyContent(data);
+      }
    },
    [t, toast]
  );
@@ -62,3 +45,29 @@ export const useCopyData = () => {
    copyData
  };
};

+export const ManualCopyModal = () => {
+  const { t } = useTranslation();
+  const { copyContent, setCopyContent } = useCommonStore();
+
+  return (
+    <MyModal
+      isOpen={!!copyContent}
+      iconSrc="copy"
+      iconColor="primary.600"
+      title={t('common:common.Copy')}
+      maxW={['90vw', '500px']}
+      w={'100%'}
+      onClose={() => setCopyContent(undefined)}
+    >
+      <ModalBody>
+        <Tag w={'100%'} colorSchema="blue">
+          {t('common:can_copy_content_tip')}
+        </Tag>
+        <Box mt={3} borderRadius={'md'} p={3} border={'base'} userSelect={'all'}>
+          {copyContent}
+        </Box>
+      </ModalBody>
+    </MyModal>
+  );
+};
@@ -21,7 +21,7 @@
  "edit_member_tip": "username",
  "edit_info": "Edit information",
  "edit_org_info": "Edit organization information",
-  "expires": "Expiration",
+  "expires": "Expiration time",
  "forbid_hint": "After forbidden, this invitation link will become invalid. This action is irreversible. Are you sure you want to deactivate?",
  "forbid_success": "Forbid success",
  "forbidden": "Forbidden",
@@ -37,6 +37,7 @@
  "add_new_param": "Add new param",
  "app.templateMarket.templateTags.Writing": "Writing",
  "back": "Back",
+  "can_copy_content_tip": "It is not possible to copy automatically using the browser, please manually copy the following content",
  "chose_condition": "Choose Condition",
  "chosen": "Chosen",
  "classification": "Classification",
@@ -128,7 +129,6 @@
  "common.Continue_Adding": "Continue adding",
  "common.Copy": "Copy",
  "common.Copy Successful": "Copied Successfully",
-  "common.Copy_failed": "Copy Failed, Please Copy Manually",
  "common.Create Failed": "Creation Failed",
  "common.Create Success": "Created Successfully",
  "common.Create Time": "Creation Time",
@@ -645,11 +645,11 @@
  "core.dataset.training.Auto mode": "Auto index",
  "core.dataset.training.Auto mode Tip": "Increase the semantic richness of data blocks by generating related questions and summaries through sub-indexes and calling models, making it more conducive to retrieval. Requires more storage space and increases AI call times.",
  "core.dataset.training.Chunk mode": "Chunk",
-  "core.dataset.training.Full": "Estimated Over 5 Minutes",
+  "core.dataset.training.Full": "It is expected to be more than 20 minutes",
  "core.dataset.training.Leisure": "Idle",
  "core.dataset.training.QA mode": "QA",
  "core.dataset.training.Vector queue": "Index Queue",
-  "core.dataset.training.Waiting": "Estimated 5 Minutes",
+  "core.dataset.training.Waiting": "Estimated 20 minutes",
  "core.dataset.training.Website Sync": "Website Sync",
  "core.dataset.training.tag": "Queue Status",
  "core.dataset.website.Base Url": "Base URL",
@@ -5,6 +5,7 @@
  "api_url": "API Url",
  "auto_indexes": "Automatically generate supplementary indexes",
  "auto_indexes_tips": "Additional index generation is performed through large models to improve semantic richness and improve retrieval accuracy.",
+  "auto_training_queue": "Enhanced index queueing",
  "chunk_max_tokens": "max_tokens",
  "close_auto_sync": "Are you sure you want to turn off automatic sync?",
  "collection.Create update time": "Creation/Update Time",
@@ -56,6 +57,7 @@
  "ideal_chunk_length_tips": "Segment according to the end symbol and combine multiple segments into one block. This value determines the estimated size of the block, if there is any fluctuation.",
  "image_auto_parse": "Automatic image indexing",
  "image_auto_parse_tips": "Call VLM to automatically label the pictures in the document and generate additional search indexes",
+  "image_training_queue": "Queue of image processing",
  "import.Auto mode Estimated Price Tips": "The text understanding model needs to be called, which requires more points: {{price}} points/1K tokens",
  "import.Embedding Estimated Price Tips": "Only use the index model and consume a small amount of AI points: {{price}} points/1K tokens",
  "import_confirm": "Confirm upload",
@@ -24,7 +24,7 @@
  "edit_member_tip": "用户名",
  "edit_info": "编辑信息",
  "edit_org_info": "编辑部门信息",
-  "expires": "有效期",
+  "expires": "过期时间",
  "export_members": "导出成员",
  "forbid_hint": "停用后,该邀请链接将失效。 该操作不可撤销,是否确认停用?",
  "forbid_success": "停用成功",
@@ -41,6 +41,7 @@
  "app.templateMarket.templateTags.Web_search": "联网搜索",
  "app.templateMarket.templateTags.Writing": "文本创作",
  "back": "返回",
+  "can_copy_content_tip": "无法使用浏览器自动复制,请手动复制下面内容",
  "chose_condition": "选择条件",
  "chosen": "已选",
  "classification": "分类",
@@ -132,7 +133,6 @@
  "common.Continue_Adding": "继续添加",
  "common.Copy": "复制",
  "common.Copy Successful": "复制成功",
-  "common.Copy_failed": "复制失败,请手动复制",
  "common.Create Failed": "创建异常",
  "common.Create Success": "创建成功",
  "common.Create Time": "创建时间",
@@ -648,11 +648,11 @@
  "core.dataset.training.Auto mode": "补充索引",
  "core.dataset.training.Auto mode Tip": "通过子索引以及调用模型生成相关问题与摘要,来增加数据块的语义丰富度,更利于检索。需要消耗更多的存储空间和增加 AI 调用次数。",
  "core.dataset.training.Chunk mode": "直接分块",
-  "core.dataset.training.Full": "预计 5 分钟以上",
+  "core.dataset.training.Full": "预计 20 分钟以上",
  "core.dataset.training.Leisure": "空闲",
  "core.dataset.training.QA mode": "问答对提取",
  "core.dataset.training.Vector queue": "索引排队",
-  "core.dataset.training.Waiting": "预计 5 分钟",
+  "core.dataset.training.Waiting": "预计 20 分钟",
  "core.dataset.training.Website Sync": "Web 站点同步",
  "core.dataset.training.tag": "排队情况",
  "core.dataset.website.Base Url": "根地址",
@@ -5,6 +5,7 @@
  "api_url": "接口地址",
  "auto_indexes": "自动生成补充索引",
  "auto_indexes_tips": "通过大模型进行额外索引生成,提高语义丰富度,提高检索的精度。",
+  "auto_training_queue": "增强索引排队",
  "chunk_max_tokens": "分块上限",
  "close_auto_sync": "确认关闭自动同步功能?",
  "collection.Create update time": "创建/更新时间",
@@ -56,6 +57,7 @@
  "ideal_chunk_length_tips": "按结束符号进行分段,并将多个分段组成一个分块,该值决定了分块的预估大小,如果会有上下浮动。",
  "image_auto_parse": "图片自动索引",
  "image_auto_parse_tips": "调用 VLM 自动标注文档里的图片,并生成额外的检索索引",
+  "image_training_queue": "图片处理排队",
  "import.Auto mode Estimated Price Tips": "需调用文本理解模型,需要消耗较多AI 积分:{{price}} 积分/1K tokens",
  "import.Embedding Estimated Price Tips": "仅使用索引模型,消耗少量 AI 积分:{{price}} 积分/1K tokens",
  "import_confirm": "确认上传",
@@ -21,7 +21,7 @@
  "edit_member_tip": "使用者名稱",
  "edit_info": "編輯訊息",
  "edit_org_info": "編輯部門資訊",
-  "expires": "有效期",
+  "expires": "過期時間",
  "forbid_hint": "停用後,該邀請連結將失效。 該操作不可撤銷,是否確認停用?",
  "forbid_success": "停用成功",
  "forbidden": "停用",
@@ -37,6 +37,7 @@
  "add_new_param": "新增參數",
  "app.templateMarket.templateTags.Writing": "文字創作",
  "back": "返回",
+  "can_copy_content_tip": "無法使用瀏覽器自動複製,請手動複製下面內容",
  "chose_condition": "選擇條件",
  "chosen": "已選擇",
  "classification": "分類",
@@ -127,7 +128,6 @@
  "common.Continue_Adding": "繼續新增",
  "common.Copy": "複製",
  "common.Copy Successful": "複製成功",
-  "common.Copy_failed": "複製失敗,請手動複製",
  "common.Create Failed": "建立失敗",
  "common.Create Success": "建立成功",
  "common.Create Time": "建立時間",
@@ -644,11 +644,11 @@
  "core.dataset.training.Auto mode": "補充索引",
  "core.dataset.training.Auto mode Tip": "透過子索引以及呼叫模型產生相關問題與摘要,來增加資料區塊的語意豐富度,更有利於檢索。需要消耗更多的儲存空間並增加 AI 呼叫次數。",
  "core.dataset.training.Chunk mode": "直接分块",
-  "core.dataset.training.Full": "預計超過 5 分鐘",
+  "core.dataset.training.Full": "預計 20 分鐘以上",
  "core.dataset.training.Leisure": "閒置",
  "core.dataset.training.QA mode": "問答對提取",
  "core.dataset.training.Vector queue": "索引排隊中",
-  "core.dataset.training.Waiting": "預計 5 分鐘",
+  "core.dataset.training.Waiting": "預計 20 分鐘",
  "core.dataset.training.Website Sync": "網站同步",
  "core.dataset.training.tag": "排隊狀況",
  "core.dataset.website.Base Url": "根網址",
@@ -5,6 +5,7 @@
  "api_url": "介面位址",
  "auto_indexes": "自動生成補充索引",
  "auto_indexes_tips": "通過大模型進行額外索引生成,提高語義豐富度,提高檢索的精度。",
+  "auto_training_queue": "增強索引排隊",
  "chunk_max_tokens": "分塊上限",
  "close_auto_sync": "確認關閉自動同步功能?",
  "collection.Create update time": "建立/更新時間",
@@ -56,6 +57,7 @@
  "ideal_chunk_length_tips": "依結束符號進行分段,並將多個分段組成一個分塊,此值決定了分塊的預估大小,可能會有上下浮動。",
  "image_auto_parse": "圖片自動索引",
  "image_auto_parse_tips": "調用 VLM 自動標註文檔裡的圖片,並生成額外的檢索索引",
+  "image_training_queue": "圖片處理排隊",
  "import.Auto mode Estimated Price Tips": "需呼叫文字理解模型,將消耗較多 AI 點數:{{price}} 點數 / 1K tokens",
  "import.Embedding Estimated Price Tips": "僅使用索引模型,消耗少量 AI 點數:{{price}} 點數 / 1K tokens",
  "import_confirm": "確認上傳",
@@ -34,7 +34,8 @@
    "react-hook-form": "7.43.1",
    "react-i18next": "14.1.2",
    "react-photo-view": "^1.2.6",
-    "use-context-selector": "^1.4.4"
+    "use-context-selector": "^1.4.4",
+    "zustand": "^4.3.5"
  },
  "devDependencies": {
    "@types/js-cookie": "^3.0.5",
packages/web/store/useCommonStore.ts (25 changed lines, new file)
@@ -0,0 +1,25 @@
+import { create, devtools, persist, immer } from '../common/zustand';
+
+type State = {
+  copyContent?: string;
+  setCopyContent: (val?: string) => void;
+};
+
+export const useCommonStore = create<State>()(
+  devtools(
+    persist(
+      immer((set, get) => ({
+        copyContent: undefined,
+        setCopyContent(val) {
+          set((state) => {
+            state.copyContent = val;
+          });
+        }
+      })),
+      {
+        name: 'commonStore',
+        partialize: (state) => ({})
+      }
+    )
+  )
+);
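Since `ManualCopyModal` opens whenever `copyContent` is non-empty (see the hook diff above), any component can trigger the manual-copy flow by writing to this store; a small sketch:

```ts
import { useCommonStore } from '@fastgpt/web/store/useCommonStore';

// Inside a React component: stash the text that could not be copied
// automatically; ManualCopyModal renders as soon as copyContent is set.
const { setCopyContent } = useCommonStore();
setCopyContent('text the browser refused to copy');
```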
pnpm-lock.yaml (6 changed lines, generated)
@@ -398,6 +398,9 @@ importers:
      use-context-selector:
        specifier: ^1.4.4
        version: 1.4.4(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(scheduler@0.23.2)
+      zustand:
+        specifier: ^4.3.5
+        version: 4.5.6(@types/react@18.3.1)(immer@9.0.21)(react@18.3.1)
    devDependencies:
      '@types/js-cookie':
        specifier: ^3.0.5
@@ -588,9 +591,6 @@ importers:
      use-context-selector:
        specifier: ^1.4.4
        version: 1.4.4(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(scheduler@0.23.2)
-      zustand:
-        specifier: ^4.3.5
-        version: 4.5.6(@types/react@18.3.1)(immer@9.0.21)(react@18.3.1)
    devDependencies:
      '@svgr/webpack':
        specifier: ^6.5.1
@@ -64,8 +64,7 @@
    "request-ip": "^3.3.0",
    "sass": "^1.58.3",
    "use-context-selector": "^1.4.4",
-    "@node-rs/jieba": "2.0.1",
-    "zustand": "^4.3.5"
+    "@node-rs/jieba": "2.0.1"
  },
  "devDependencies": {
    "@svgr/webpack": "^6.5.1",
@@ -22,6 +22,9 @@ const NotSufficientModal = dynamic(() => import('@/components/support/wallet/Not
const SystemMsgModal = dynamic(() => import('@/components/support/user/inform/SystemMsgModal'));
const ImportantInform = dynamic(() => import('@/components/support/user/inform/ImportantInform'));
const UpdateContact = dynamic(() => import('@/components/support/user/inform/UpdateContactModal'));
+const ManualCopyModal = dynamic(() =>
+  import('@fastgpt/web/hooks/useCopyData').then((mod) => mod.ManualCopyModal)
+);

const pcUnShowLayoutRoute: Record<string, boolean> = {
  '/': true,
@@ -162,6 +165,7 @@ const Layout = ({ children }: { children: JSX.Element }) => {
        </>
      )}

+      <ManualCopyModal />
      <Loading loading={loading} zIndex={999999} />
    </>
  );
@@ -48,15 +48,6 @@ export type InsertOneDatasetDataProps = PushDatasetDataChunkProps & {
  collectionId: string;
};

-export type GetTrainingQueueProps = {
-  vectorModel: string;
-  agentModel: string;
-};
-export type GetTrainingQueueResponse = {
-  vectorTrainingCount: number;
-  agentTrainingCount: number;
-};
-
/* -------------- search ---------------- */
export type SearchTestProps = {
  datasetId: string;
@@ -22,7 +22,13 @@ import { useRequest2 } from '@fastgpt/web/hooks/useRequest';
import { useTranslation } from 'next-i18next';
import { useForm } from 'react-hook-form';

-function CreateInvitationModal({ onClose }: { onClose: (linkId?: string) => void }) {
+function CreateInvitationModal({
+  onSuccess,
+  onClose
+}: {
+  onSuccess: (linkId: string) => void;
+  onClose: () => void;
+}) {
  const { t } = useTranslation();
  const expiresOptions: Array<{ label: string; value: InvitationLinkExpiresType }> = [
    { label: t('account_team:30mins'), value: '30m' }, // 30 mins
@@ -43,12 +49,11 @@ function CreateInvitationModal({

  const { runAsync: createInvitationLink, loading } = useRequest2(postCreateInvitationLink, {
    manual: true,
    successToast: t('common:common.Create Success'),
    errorToast: t('common:common.Create Failed'),
    onSuccess: (data) => {
-      onClose(data);
-    },
-    onFinally: () => onClose()
+      onSuccess(data);
+      onClose();
+    }
  });

  return (
@@ -35,15 +35,7 @@ import { useCallback } from 'react';

const CreateInvitationModal = dynamic(() => import('./CreateInvitationModal'));

-const InviteModal = ({
-  teamId,
-  onClose,
-  onSuccess
-}: {
-  teamId: string;
-  onClose: () => void;
-  onSuccess: () => void;
-}) => {
+const InviteModal = ({ onClose }: { onClose: () => void }) => {
  const { t } = useTranslation();

  const {
@@ -57,10 +49,10 @@ const InviteModal = ({
  const { isOpen: isOpenCreate, onOpen: onOpenCreate, onClose: onCloseCreate } = useDisclosure();

  const isLoading = isLoadingLink;

  const { copyData } = useCopyData();
  const { userInfo } = useUserStore();
  const { feConfigs } = useSystemStore();

  const onCopy = useCallback(
    (linkId: string) => {
      const url = location.origin + `/account/team?invitelinkid=${linkId}`;
@@ -76,7 +68,7 @@ const InviteModal = ({
        })
      );
    },
-    [copyData]
+    [copyData, feConfigs?.systemTitle, t, userInfo?.team.memberName, userInfo?.team.teamName]
  );

  const { runAsync: onForbid, loading: forbiding } = useRequest2(putForbidInvitationLink, {
@@ -131,13 +123,7 @@ const InviteModal = ({
              <Td maxW="200px" minW="100px">
                {item.description}
              </Td>
-              <Td>
-                {isForbidden ? (
-                  <Tag colorSchema="gray">{t('account_team:has_forbidden')}</Tag>
-                ) : (
-                  format(new Date(item.expires), 'yyyy-MM-dd HH:mm')
-                )}
-              </Td>
+              <Td>{format(new Date(item.expires), 'yyyy-MM-dd HH:mm')}</Td>
              <Td>
                {item.usedTimesLimit === -1
                  ? t('account_team:unlimited')
@@ -153,7 +139,6 @@ const InviteModal = ({
                    cursor="pointer"
                    _hover={{ bg: 'myGray.100' }}
                    p="1.5"
-                    w="fit-content"
                  >
                    <AvatarGroup max={3} avatars={item.members.map((i) => i.avatar)} />
                  </Box>
@@ -162,7 +147,7 @@ const InviteModal = ({
                  closeOnBlur={true}
                >
                  {() => (
-                    <Box py="4" maxH="200px" w="fit-content">
+                    <Box py="4" maxH="200px">
                      <Flex mx="4" justifyContent="center" alignItems={'center'}>
                        <Box>{t('account_team:has_invited')}</Box>
                        <Box
@@ -175,15 +160,16 @@ const InviteModal = ({
                          {item.members.length}
                        </Box>
                      </Flex>
-                      <Divider my="2" mx="4" />
+                      <Divider my="2" />
                      <Grid
-                        w="fit-content"
-                        mt="2"
-                        gridRowGap="4"
+                        mt="4"
+                        gap={4}
                        gridTemplateColumns="1fr 1fr"
                        overflow="auto"
                        alignItems="center"
+                        mx="4"
+                        maxH={'250px'}
                      >
                        {item.members.map((member) => (
                          <Box key={member.tmbId} justifySelf="start">
@@ -197,7 +183,9 @@ const InviteModal = ({
                  )}
                </Td>
                <Td>
-                  {!isForbidden && (
+                  {isForbidden ? (
+                    <Tag colorSchema="red">{t('account_team:has_forbidden')}</Tag>
+                  ) : (
                    <>
                      <Button
                        size="sm"
@@ -261,17 +249,11 @@ const InviteModal = ({
      </ModalFooter>
      {isOpenCreate && (
        <CreateInvitationModal
-          onClose={(linkId?: string) =>
-            Promise.all([
-              onCloseCreate(),
-              refetchInvitationLinkList(),
-              (() => {
-                if (linkId) {
-                  onCopy(linkId);
-                }
-              })()
-            ])
-          }
+          onSuccess={(linkId) => {
+            refetchInvitationLinkList();
+            onCopy(linkId);
+          }}
+          onClose={onCloseCreate}
        />
      )}
    </MyModal>
@@ -29,7 +29,6 @@ import {
  DatasetCollectionTypeEnum,
  DatasetStatusEnum,
  DatasetCollectionSyncResultMap,
-  DatasetTypeEnum,
  DatasetCollectionDataProcessModeMap
} from '@fastgpt/global/core/dataset/constants';
import { getCollectionIcon } from '@fastgpt/global/core/dataset/utils';
@@ -45,7 +44,10 @@ import { CollectionPageContext } from './Context';
import { DatasetPageContext } from '@/web/core/dataset/context/datasetPageContext';
import { formatTime2YMDHM } from '@fastgpt/global/common/string/time';
import MyTag from '@fastgpt/web/components/common/Tag/index';
-import { checkCollectionIsFolder } from '@fastgpt/global/core/dataset/collection/utils';
+import {
+  checkCollectionIsFolder,
+  collectionCanSync
+} from '@fastgpt/global/core/dataset/collection/utils';
import { useFolderDrag } from '@/components/common/folder/useFolderDrag';
import TagsPopOver from './TagsPopOver';
import { useSystemStore } from '@/web/common/system/useSystemStore';
@@ -315,8 +317,7 @@ const CollectionCard = () => {
          menuList={[
            {
              children: [
-                ...(collection.type === DatasetCollectionTypeEnum.link ||
-                datasetDetail.type === DatasetTypeEnum.apiDataset
+                ...(collectionCanSync(collection.type)
                  ? [
                      {
                        label: (
@@ -1,4 +1,4 @@
-import React, { useCallback } from 'react';
+import React, { useCallback, useMemo } from 'react';
import { useTranslation } from 'next-i18next';
import { Box, Flex, IconButton, useTheme, Progress } from '@chakra-ui/react';
import MyIcon from '@fastgpt/web/components/common/Icon';
@@ -9,6 +9,8 @@ import LightRowTabs from '@fastgpt/web/components/common/Tabs/LightRowTabs';
import { useSystem } from '@fastgpt/web/hooks/useSystem';
import MyPopover from '@fastgpt/web/components/common/MyPopover';
import ParentPaths from '@/components/common/ParentPaths';
+import { getTrainingQueueLen } from '@/web/core/dataset/api';
+import { useRequest2 } from '@fastgpt/web/hooks/useRequest';

export enum TabEnum {
  dataCard = 'dataCard',
@@ -24,8 +26,68 @@ const NavBar = ({ currentTab }: { currentTab: TabEnum }) => {
  const router = useRouter();
  const query = router.query;
  const { isPc } = useSystem();
-  const { datasetDetail, vectorTrainingMap, agentTrainingMap, rebuildingCount, paths } =
-    useContextSelector(DatasetPageContext, (v) => v);
+  const { datasetDetail, rebuildingCount, paths } = useContextSelector(
+    DatasetPageContext,
+    (v) => v
+  );
+
+  // global queue
+  const {
+    data: {
+      vectorTrainingCount = 0,
+      qaTrainingCount = 0,
+      autoTrainingCount = 0,
+      imageTrainingCount = 0
+    } = {}
+  } = useRequest2(getTrainingQueueLen, {
+    manual: false,
+    retryInterval: 10000
+  });
+  const { vectorTrainingMap, qaTrainingMap, autoTrainingMap, imageTrainingMap } = useMemo(() => {
+    const vectorTrainingMap = (() => {
+      if (vectorTrainingCount < 1000)
+        return {
+          colorSchema: 'green',
+          tip: t('common:core.dataset.training.Leisure')
+        };
+      if (vectorTrainingCount < 20000)
+        return {
+          colorSchema: 'yellow',
+          tip: t('common:core.dataset.training.Waiting')
+        };
+      return {
+        colorSchema: 'red',
+        tip: t('common:core.dataset.training.Full')
+      };
+    })();
+
+    const countLLMMap = (count: number) => {
+      if (count < 100)
+        return {
+          colorSchema: 'green',
+          tip: t('common:core.dataset.training.Leisure')
+        };
+      if (count < 1000)
+        return {
+          colorSchema: 'yellow',
+          tip: t('common:core.dataset.training.Waiting')
+        };
+      return {
+        colorSchema: 'red',
+        tip: t('common:core.dataset.training.Full')
+      };
+    };
+    const qaTrainingMap = countLLMMap(qaTrainingCount);
+    const autoTrainingMap = countLLMMap(autoTrainingCount);
+    const imageTrainingMap = countLLMMap(imageTrainingCount);
+
+    return {
+      vectorTrainingMap,
+      qaTrainingMap,
+      autoTrainingMap,
+      imageTrainingMap
+    };
+  }, [qaTrainingCount, autoTrainingCount, imageTrainingCount, vectorTrainingCount, t]);

  const tabList = [
    {
@@ -172,12 +234,38 @@ const NavBar = ({ currentTab }: { currentTab: TabEnum }) => {
            )}
            <Box mb={3}>
              <Box fontSize={'sm'} pb={1}>
-                {t('common:core.dataset.training.Agent queue')}({agentTrainingMap.tip})
+                {t('common:core.dataset.training.Agent queue')}({qaTrainingMap.tip})
              </Box>
              <Progress
                value={100}
                size={'xs'}
-                colorScheme={agentTrainingMap.colorSchema}
+                colorScheme={qaTrainingMap.colorSchema}
                borderRadius={'md'}
                isAnimated
                hasStripe
              />
            </Box>
+            <Box mb={3}>
+              <Box fontSize={'sm'} pb={1}>
+                {t('dataset:auto_training_queue')}({autoTrainingMap.tip})
+              </Box>
+              <Progress
+                value={100}
+                size={'xs'}
+                colorScheme={autoTrainingMap.colorSchema}
+                borderRadius={'md'}
+                isAnimated
+                hasStripe
+              />
+            </Box>
+            <Box mb={3}>
+              <Box fontSize={'sm'} pb={1}>
+                {t('dataset:image_training_queue')}({imageTrainingMap.tip})
+              </Box>
+              <Progress
+                value={100}
+                size={'xs'}
+                colorScheme={imageTrainingMap.colorSchema}
+                borderRadius={'md'}
+                isAnimated
+                hasStripe
+              />
+            </Box>
@@ -41,7 +41,7 @@ async function handler(req: NextApiRequest): CreateCollectionResponse {
    return Promise.reject(DatasetErrEnum.sameApiCollection);
  }

-  const content = await readApiServerFileContent({
+  const { title, rawText } = await readApiServerFileContent({
    apiServer,
    feishuServer,
    yuqueServer,
@@ -53,14 +53,14 @@ async function handler(req: NextApiRequest): CreateCollectionResponse {

  const { collectionId, insertResults } = await createCollectionAndInsertData({
    dataset,
-    rawText: content,
+    rawText,
    relatedId: apiFileId,
    createCollectionParams: {
      ...body,
      teamId,
      tmbId,
      type: DatasetCollectionTypeEnum.apiFile,
-      name,
+      name: title || name,
      apiFileId,
      metadata: {
        relatedImgId: apiFileId
@@ -77,7 +77,7 @@ async function handler(
    return Promise.reject(i18nT('dataset:collection_not_support_retraining'));
  })();

-  const rawText = await readDatasetSourceRawText({
+  const { title, rawText } = await readDatasetSourceRawText({
    teamId,
    tmbId,
    customPdfParse,
@@ -100,7 +100,7 @@ async function handler(
    teamId: collection.teamId,
    tmbId: collection.tmbId,
    datasetId: collection.dataset._id,
-    name: collection.name,
+    name: title || collection.name,
    type: collection.type,

    customPdfParse,
@@ -82,7 +82,7 @@ async function handler(
    };
  })();

-  const rawText = await readDatasetSourceRawText({
+  const { rawText } = await readDatasetSourceRawText({
    teamId,
    tmbId,
    type,
@@ -1,27 +1,31 @@
import type { NextApiRequest } from 'next';
import { MongoDatasetTraining } from '@fastgpt/service/core/dataset/training/schema';
import { authCert } from '@fastgpt/service/support/permission/auth/common';
-import { GetTrainingQueueProps } from '@/global/core/dataset/api';
import { NextAPI } from '@/service/middleware/entry';
import { readFromSecondary } from '@fastgpt/service/common/mongo/utils';
+import { TrainingModeEnum } from '@fastgpt/global/core/dataset/constants';
+
+export type GetQueueLenResponse = {
+  vectorTrainingCount: number;
+  qaTrainingCount: number;
+  autoTrainingCount: number;
+  imageTrainingCount: number;
+};

async function handler(req: NextApiRequest) {
  await authCert({ req, authToken: true });
-  const { vectorModel, agentModel } = req.query as GetTrainingQueueProps;

  // get queue data
-  // count the queue separately for model = vectorModel and model = agentModel
  const data = await MongoDatasetTraining.aggregate(
    [
      {
        $match: {
-          lockTime: { $lt: new Date('2040/1/1') },
-          $or: [{ model: { $eq: vectorModel } }, { model: { $eq: agentModel } }]
+          lockTime: { $lt: new Date('2040/1/1') }
        }
      },
      {
        $group: {
-          _id: '$model',
+          _id: '$mode',
          count: { $sum: 1 }
        }
      }
@@ -31,12 +35,16 @@ async function handler(req: NextApiRequest) {
    }
  );

-  const vectorTrainingCount = data.find((item) => item._id === vectorModel)?.count || 0;
-  const agentTrainingCount = data.find((item) => item._id === agentModel)?.count || 0;
+  const vectorTrainingCount = data.find((item) => item._id === TrainingModeEnum.chunk)?.count || 0;
+  const qaTrainingCount = data.find((item) => item._id === TrainingModeEnum.qa)?.count || 0;
+  const autoTrainingCount = data.find((item) => item._id === TrainingModeEnum.auto)?.count || 0;
+  const imageTrainingCount = data.find((item) => item._id === TrainingModeEnum.image)?.count || 0;

  return {
    vectorTrainingCount,
-    agentTrainingCount
+    qaTrainingCount,
+    autoTrainingCount,
+    imageTrainingCount
  };
}
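Because the `$group` stage buckets pending jobs by their `mode` field, the aggregation returns one count per training mode; the shape the handler consumes looks like this (the counts are illustrative):

```ts
// Illustrative aggregation result: one document per TrainingModeEnum value.
const data = [
  { _id: 'chunk', count: 1203 }, // vector/index training
  { _id: 'qa', count: 57 },
  { _id: 'auto', count: 12 },
  { _id: 'image', count: 3 }
];
```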
@@ -1,6 +1,4 @@
-import { create } from 'zustand';
-import { devtools, persist } from 'zustand/middleware';
-import { immer } from 'zustand/middleware/immer';
+import { create, devtools, persist, immer } from '@fastgpt/web/common/zustand';
import axios from 'axios';
import { OAuthEnum } from '@fastgpt/global/support/user/constant';
import type {
@@ -1,6 +1,4 @@
-import { create } from 'zustand';
-import { createJSONStorage, devtools, persist } from 'zustand/middleware';
-import { immer } from 'zustand/middleware/immer';
+import { create, createJSONStorage, devtools, persist, immer } from '@fastgpt/web/common/zustand';
import { getNanoid } from '@fastgpt/global/common/string/tools';
import { OutLinkChatAuthProps } from '@fastgpt/global/support/permission/chat';
import { ChatSourceEnum } from '@fastgpt/global/core/chat/constants';
@@ -1,6 +1,4 @@
-import { create } from 'zustand';
-import { devtools, persist } from 'zustand/middleware';
-import { immer } from 'zustand/middleware/immer';
+import { create, devtools, persist, immer } from '@fastgpt/web/common/zustand';

type State = {
  localUId?: string;
@@ -27,12 +27,7 @@ import type {
  TextCreateDatasetCollectionParams,
  UpdateDatasetCollectionTagParams
} from '@fastgpt/global/core/dataset/api.d';
-import type {
-  GetTrainingQueueProps,
-  GetTrainingQueueResponse,
-  SearchTestProps,
-  SearchTestResponse
-} from '@/global/core/dataset/api.d';
+import type { SearchTestProps, SearchTestResponse } from '@/global/core/dataset/api.d';
import type { CreateDatasetParams, InsertOneDatasetDataProps } from '@/global/core/dataset/api.d';
import type { DatasetCollectionItemType } from '@fastgpt/global/core/dataset/type';
import { DatasetCollectionSyncResultEnum } from '@fastgpt/global/core/dataset/constants';
@@ -67,6 +62,7 @@ import type {
} from '@/pages/api/core/dataset/apiDataset/listExistId';
import type { GetQuoteDataResponse } from '@/pages/api/core/dataset/data/getQuoteData';
import type { GetQuotePermissionResponse } from '@/pages/api/core/dataset/data/getPermission';
+import type { GetQueueLenResponse } from '@/pages/api/core/dataset/training/getQueueLen';

/* ======================== dataset ======================= */
export const getDatasets = (data: GetDatasetListBody) =>
@@ -215,8 +211,8 @@ export const postRebuildEmbedding = (data: rebuildEmbeddingBody) =>
  POST(`/core/dataset/training/rebuildEmbedding`, data);

/* get length of system training queue */
-export const getTrainingQueueLen = (data: GetTrainingQueueProps) =>
-  GET<GetTrainingQueueResponse>(`/core/dataset/training/getQueueLen`, data);
+export const getTrainingQueueLen = () =>
+  GET<GetQueueLenResponse>(`/core/dataset/training/getQueueLen`);
export const getDatasetTrainingQueue = (datasetId: string) =>
  GET<getDatasetTrainingQueueResponse>(`/core/dataset/training/getDatasetTrainingQueue`, {
    datasetId
@@ -1,5 +1,5 @@
import { useQuery } from '@tanstack/react-query';
-import { Dispatch, ReactNode, SetStateAction, useMemo, useState } from 'react';
+import { Dispatch, ReactNode, SetStateAction, useState } from 'react';
import { useTranslation } from 'next-i18next';
import { createContext } from 'use-context-selector';
import {
@@ -8,7 +8,6 @@ import {
  getDatasetCollectionTags,
  getDatasetPaths,
  getDatasetTrainingQueue,
-  getTrainingQueueLen,
  postCreateDatasetCollectionTag,
  putDatasetById
} from '../api';
@@ -37,28 +36,13 @@ type DatasetPageContextType = {
  setSearchTagKey: Dispatch<SetStateAction<string>>;
  paths: ParentTreePathItemType[];
  refetchPaths: () => void;
-  vectorTrainingMap: {
-    colorSchema: string;
-    tip: string;
-  };
-  agentTrainingMap: {
-    colorSchema: string;
-    tip: string;
-  };

  rebuildingCount: number;
  trainingCount: number;
  refetchDatasetTraining: () => void;
};

export const DatasetPageContext = createContext<DatasetPageContextType>({
-  vectorTrainingMap: {
-    colorSchema: '',
-    tip: ''
-  },
-  agentTrainingMap: {
-    colorSchema: '',
-    tip: ''
-  },
  rebuildingCount: 0,
  trainingCount: 0,
  refetchDatasetTraining: function (): void {
@@ -191,57 +175,6 @@ export const DatasetPageContextProvider = ({
    }
  );

-  // global queue
-  const { data: { vectorTrainingCount = 0, agentTrainingCount = 0 } = {} } = useQuery(
-    ['getTrainingQueueLen'],
-    () =>
-      getTrainingQueueLen({
-        vectorModel: datasetDetail.vectorModel.model,
-        agentModel: datasetDetail.agentModel.model
-      }),
-    {
-      refetchInterval: 10000
-    }
-  );
-  const { vectorTrainingMap, agentTrainingMap } = useMemo(() => {
-    const vectorTrainingMap = (() => {
-      if (vectorTrainingCount < 1000)
-        return {
-          colorSchema: 'green',
-          tip: t('common:core.dataset.training.Leisure')
-        };
-      if (vectorTrainingCount < 10000)
-        return {
-          colorSchema: 'yellow',
-          tip: t('common:core.dataset.training.Waiting')
-        };
-      return {
-        colorSchema: 'red',
-        tip: t('common:core.dataset.training.Full')
-      };
-    })();
-    const agentTrainingMap = (() => {
-      if (agentTrainingCount < 100)
-        return {
-          colorSchema: 'green',
-          tip: t('common:core.dataset.training.Leisure')
-        };
-      if (agentTrainingCount < 1000)
-        return {
-          colorSchema: 'yellow',
-          tip: t('common:core.dataset.training.Waiting')
-        };
-      return {
-        colorSchema: 'red',
-        tip: t('common:core.dataset.training.Full')
-      };
-    })();
-    return {
-      vectorTrainingMap,
-      agentTrainingMap
-    };
-  }, [agentTrainingCount, t, vectorTrainingCount]);

  // training and rebuild queue
  const { data: { rebuildingCount = 0, trainingCount = 0 } = {}, refetch: refetchDatasetTraining } =
    useQuery(['getDatasetTrainingQueue'], () => getDatasetTrainingQueue(datasetId), {
@@ -273,8 +206,7 @@ export const DatasetPageContextProvider = ({
    updateDataset,
    paths,
    refetchPaths,
-    vectorTrainingMap,
-    agentTrainingMap,

    rebuildingCount,
    trainingCount,
    refetchDatasetTraining,
@@ -1,6 +1,4 @@
-import { create } from 'zustand';
-import { devtools, persist } from 'zustand/middleware';
-import { immer } from 'zustand/middleware/immer';
+import { create, devtools, persist, immer } from '@fastgpt/web/common/zustand';
import type { DatasetListItemType } from '@fastgpt/global/core/dataset/type.d';
import { getDatasets } from '@/web/core/dataset/api';
@@ -1,6 +1,4 @@
-import { create } from 'zustand';
-import { devtools } from 'zustand/middleware';
-import { immer } from 'zustand/middleware/immer';
+import { create, devtools, immer } from '@fastgpt/web/common/zustand';

export type MarkDataStore = {
  dataId: string;
@@ -1,6 +1,5 @@
-import { create } from 'zustand';
-import { devtools, persist } from 'zustand/middleware';
-import { immer } from 'zustand/middleware/immer';
+import { create, devtools, persist, immer } from '@fastgpt/web/common/zustand';

import type { SearchDataResponseItemType } from '@fastgpt/global/core/dataset/type';
import { DatasetSearchModeEnum } from '@fastgpt/global/core/dataset/constants';
@@ -1,3 +1,5 @@
+import { create, devtools, persist, immer } from '@fastgpt/web/common/zustand';
+
import type { UserUpdateParams } from '@/types/user';
import { useSystemStore } from '@/web/common/system/useSystemStore';
import { getTokenLogin, putUserInfo } from '@/web/support/user/api';
@@ -5,9 +7,6 @@ import type { MemberGroupListType } from '@fastgpt/global/support/permission/mem
import type { OrgType } from '@fastgpt/global/support/user/team/org/type';
import type { UserType } from '@fastgpt/global/support/user/type.d';
import type { FeTeamPlanStatusType } from '@fastgpt/global/support/wallet/sub/type';
-import { create } from 'zustand';
-import { devtools, persist } from 'zustand/middleware';
-import { immer } from 'zustand/middleware/immer';
import { getTeamPlanStatus } from './team/api';
import { getGroupList } from './team/group/api';
import { getOrgList } from './team/org/api';